Mirror of https://github.com/mvt-project/mvt.git (synced 2026-02-14 09:32:51 +00:00)
Compare commits
304 Commits
(Commit table: only the 304 commit SHA1 values survive in this mirror, from 93094367c7 at the top of the listing to 34c997f923 at the bottom; the author, date, and commit-message columns are empty.)
.github/workflows/flake8.yml (vendored, new file, 26 lines)
@@ -0,0 +1,26 @@
name: Flake8

on:
  push:
    paths:
      - '*.py'

jobs:
  flake8_py3:
    runs-on: ubuntu-latest
    steps:
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: 3.9
          architecture: x64
      - name: Checkout
        uses: actions/checkout@master
      - name: Install flake8
        run: pip install flake8
      - name: Run flake8
        uses: suo/flake8-github-action@releases/v1
        with:
          checkName: 'flake8_py3' # NOTE: this needs to be the same as the job name
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/python-package.yml (vendored, 14 changed lines)
@@ -1,7 +1,7 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Python package
name: CI

on:
  push:
@@ -16,7 +16,8 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        python-version: [3.7, 3.8, 3.9]
        # python-version: [3.7, 3.8, 3.9]
        python-version: [3.8, 3.9]

    steps:
    - uses: actions/checkout@v2
@@ -27,8 +28,9 @@ jobs:
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        python -m pip install flake8 pytest safety
        python -m pip install flake8 pytest safety stix2 pytest-mock
        if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
        python -m pip install .
    - name: Lint with flake8
      run: |
        # stop the build if there are Python syntax errors or undefined names
@@ -37,7 +39,5 @@ jobs:
        flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
    - name: Safety checks
      run: safety check

    # - name: Test with pytest
    #   run: |
    #     pytest
    - name: Test with pytest
      run: pytest
.gitignore (vendored, 6 changed lines)
@@ -131,3 +131,9 @@ dmypy.json

# Temporal files
*~

# IDEA Dev Environment
.idea

# Sublime Text project files
*.sublime*
AUTHORS (deleted, 7 lines)
@@ -1,7 +0,0 @@
MVT was originally authored by Claudio Guarnieri <nex@nex.sx>.

For an up-to-date list of all contributors visit:
https://github.com/mvt-project/mvt/graphs/contributors

Or run:
git shortlog -s -n
Makefile (3 changed lines)
@@ -8,3 +8,6 @@ dist:

upload:
	python3 -m twine upload dist/*

test-upload:
	python3 -m twine upload --repository testpypi dist/*
@@ -1,11 +1,13 @@
<p align="center">
  <img src="./docs/mvt.png" width="200" />
  <img src="https://docs.mvt.re/en/latest/mvt.png" width="200" />
</p>

# Mobile Verification Toolkit

[](https://pypi.org/project/mvt/)
[](https://docs.mvt.re/en/latest/?badge=latest)
[](https://github.com/mvt-project/mvt/actions/workflows/python-package.yml)
[](https://pepy.tech/project/mvt)

Mobile Verification Toolkit (MVT) is a collection of utilities to simplify and automate the process of gathering forensic traces helpful to identify a potential compromise of Android and iOS devices.
@@ -13,6 +15,7 @@ It has been developed and released by the [Amnesty International Security Lab](h

*Warning*: MVT is a forensic research tool intended for technologists and investigators. Using it requires understanding the basics of forensic analysis and using command-line tools. This is not intended for end-user self-assessment. If you are concerned with the security of your device please seek expert assistance.


## Installation

MVT can be installed from sources or from [PyPi](https://pypi.org/project/mvt/) (you will need some dependencies, check the [documentation](https://docs.mvt.re/en/latest/install/)):
@@ -21,14 +24,14 @@ MVT can be installed from sources or from [PyPi](https://pypi.org/project/mvt/)
pip3 install mvt
```

Alternatively, you can decide to run MVT and all relevant tools through a [Docker container](https://docs.mvt.re/en/latest/docker/).
For alternative installation options and known issues, please refer to the [documentation](https://docs.mvt.re/en/latest/install/) as well as [GitHub Issues](https://github.com/mvt-project/mvt/issues).

**Please note:** MVT is best run on Linux or Mac systems. [It does not currently support running natively on Windows.](https://docs.mvt.re/en/latest/install/#mvt-on-windows)

## Usage

MVT provides two commands `mvt-ios` and `mvt-android`. [Check out the documentation to learn how to use them!](https://docs.mvt.re/)


## License

The purpose of MVT is to facilitate the ***consensual forensic analysis*** of devices of those who might be targets of sophisticated mobile spyware attacks, especially members of civil society and marginalized communities. We do not want MVT to enable privacy violations of non-consenting individuals. In order to achieve this, MVT is released under its own license. [Read more here.](https://docs.mvt.re/en/latest/license/)
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
@@ -11,18 +11,37 @@ That said, most versions of Android should still allow to locally backup SMS mes
Because `mvt-android check-backup` currently only supports checking SMS messages, you can indicate to backup only those:

```bash
adb backup com.android.providers.telephony
adb backup -nocompress com.android.providers.telephony
```

In case you nonetheless wish to take a full backup, you can do so with

```bash
adb backup -all
adb backup -nocompress -all
```

## Unpack the backup
Some recent phones will enforce the utilisation of a password to encrypt the backup archive. In that case, the password will obviously be needed to extract and analyse the data later on.

In order to unpack the backup, use [Android Backup Extractor (ABE)](https://github.com/nelenkov/android-backup-extractor) to convert it to a readable file format. Make sure that java is installed on your system and use the following command:
## Unpack and check the backup

MVT includes a partial implementation of the Android Backup parsing, because of the implementation difference in the compression algorithm between Java and Python. The `-nocompress` option passed to adb in the section above allows to avoid this issue. You can analyse and extract SMSs containing links from the backup directly with MVT:

```bash
$ mvt-android check-backup --output /path/to/results/ /path/to/backup.ab
14:09:45 INFO [mvt.android.cli] Checking ADB backup located at: backup.ab
INFO [mvt.android.modules.backup.sms] Running module SMS...
INFO [mvt.android.modules.backup.sms] Processing SMS backup file at
apps/com.android.providers.telephony/d_f/000000_sms_backup
INFO [mvt.android.modules.backup.sms] Extracted a total of 64 SMS messages containing links
```

If the backup is encrypted, MVT will prompt you to enter the password.

Through the `--iocs` argument you can specify a [STIX2](https://oasis-open.github.io/cti-documentation/stix/intro) file defining a list of malicious indicators to check against the records extracted from the backup by MVT. Any matches will be highlighted in the terminal output.

## Alternative ways to unpack and check the backup

If you encounter an issue during the analysis of the backup, you can alternatively use [Android Backup Extractor (ABE)](https://github.com/nelenkov/android-backup-extractor) to convert it to a readable file format. Make sure that java is installed on your system and use the following command:

```bash
java -jar ~/path/to/abe.jar unpack backup.ab backup.tar
@@ -33,17 +52,4 @@ If the backup is encrypted, ABE will prompt you to enter the password.

Alternatively, [ab-decrypt](https://github.com/joernheissler/ab-decrypt) can be used for that purpose.

## Check the backup

You can then extract SMSs containing links with MVT:

```bash
$ mvt-android check-backup --output /path/to/results/ /path/to/backup/
16:18:38 INFO [mvt.android.cli] Checking ADB backup located at: .
INFO [mvt.android.modules.backup.sms] Running module SMS...
INFO [mvt.android.modules.backup.sms] Processing SMS backup file at /path/to/backup/apps/com.android.providers.telephony/d_f/000000_sms_backup
16:18:39 INFO [mvt.android.modules.backup.sms] Extracted a total of
64 SMS messages containing links
```

Through the `--iocs` argument you can specify a [STIX2](https://oasis-open.github.io/cti-documentation/stix/intro) file defining a list of malicious indicators to check against the records extracted from the backup by MVT. Any matches will be highlighted in the terminal output.
You can then extract SMSs containing links with MVT by passing the folder path as parameter instead of the `.ab` file: `mvt-android check-backup --output /path/to/results/ /path/to/backup/` (the path to backup given should be the folder containing the `apps` folder).
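A short aside on the `.ab` container handled above: an Android backup starts with a small plain-text header (the `ANDROID BACKUP` magic string, a format version, a compression flag, and the encryption method) followed by the tar payload. The sketch below is not MVT's parser; it only peeks at that header to report whether a password will be needed, which mirrors the decision `check-backup` makes before prompting.

```python
# Minimal sketch (not MVT code): read the plain-text header of an Android backup (.ab) file.
# The header is four newline-terminated fields: magic, version, compression flag, encryption.
import sys


def peek_ab_header(path: str) -> dict:
    with open(path, "rb") as handle:
        magic = handle.readline().strip()
        version = handle.readline().strip()
        compressed = handle.readline().strip()
        encryption = handle.readline().strip()

    if magic != b"ANDROID BACKUP":
        raise ValueError("not an Android backup (.ab) file")

    return {
        "version": int(version),
        "compressed": compressed == b"1",
        "encryption": encryption.decode(),
    }


if __name__ == "__main__":
    header = peek_ab_header(sys.argv[1])
    print(header)
    if header["encryption"] != "none":
        print("Encrypted backup: MVT will prompt for the password.")
```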
@@ -13,22 +13,16 @@ It might take several minutes to complete.
!!! info
    MVT will likely warn you it was unable to download certain installed packages. There is no reason to be alarmed: this is typically expected behavior when MVT attempts to download a system package it has no privileges to access.

Optionally, you can decide to enable lookups of the SHA256 hash of all the extracted APKs on [VirusTotal](https://www.virustotal.com) and/or [Koodous](https://koodous.com). While these lookups do not provide any conclusive assessment on all of the extracted APKs, they might highlight any known malicious ones:
Optionally, you can decide to enable lookups of the SHA256 hash of all the extracted APKs on [VirusTotal](https://www.virustotal.com). While these lookups do not provide any conclusive assessment on all of the extracted APKs, they might highlight any known malicious ones:

```bash
mvt-android download-apks --output /path/to/folder --virustotal
mvt-android download-apks --output /path/to/folder --koodous
MVT_VT_API_KEY=<key> mvt-android download-apks --output /path/to/folder --virustotal
```

Or, to launch all available lookups:
Please note that in order to use VirusTotal lookups you are required to provide your own API key through the `MVT_VT_API_KEY` environment variable. You should also note that VirusTotal enforces strict API usage. Be mindful that MVT might consume your hourly search quota.

In case you have a previous extraction of APKs you want to later check against VirusTotal, you can do so with the following arguments:

```bash
mvt-android download-apks --output /path/to/folder --all-checks
MVT_VT_API_KEY=<key> mvt-android download-apks --from-file /path/to/folder/apks.json --virustotal
```

In case you have a previous extraction of APKs you want to later check against VirusTotal and Koodous, you can do so with the following arguments:

```bash
mvt-android download-apks --from-file /path/to/folder/apks.json --all-checks
```
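The lookups above key off the SHA256 hash of each downloaded APK. If you want those hashes yourself (for a manual VirusTotal search or to compare against a published IOC list), the following sketch computes them over the `apks/` folder produced by `mvt-android download-apks --output`; it is generic Python, not part of MVT.

```python
# Minimal sketch (not MVT code): hash the APKs pulled by `mvt-android download-apks`.
# Assumes the default layout <output>/apks/*.apk described above.
import hashlib
import json
import sys
from pathlib import Path


def hash_apks(output_folder: str) -> dict:
    hashes = {}
    for apk in sorted(Path(output_folder, "apks").glob("*.apk")):
        sha256 = hashlib.sha256()
        with open(apk, "rb") as handle:
            for chunk in iter(lambda: handle.read(1024 * 1024), b""):
                sha256.update(chunk)
        hashes[apk.name] = sha256.hexdigest()
    return hashes


if __name__ == "__main__":
    print(json.dumps(hash_apks(sys.argv[1]), indent=4))
```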
@@ -8,8 +8,10 @@ However, not all is lost.

Because malware attacks over Android typically take the form of malicious or backdoored apps, the very first thing you might want to do is to extract and verify all installed Android packages and triage quickly if there are any which stand out as malicious or which might be atypical.

While it is out of the scope of this documentation to dwell into details on how to analyze Android apps, MVT does allow to easily and automatically extract information about installed apps, download copies of them, and quickly lookup services such as [VirusTotal](https://www.virustotal.com) or [Koodous](https://koodous.com) which might quickly indicate known bad apps.
While it is out of the scope of this documentation to dwell into details on how to analyze Android apps, MVT does allow to easily and automatically extract information about installed apps, download copies of them, and quickly look them up on services such as [VirusTotal](https://www.virustotal.com).

!!! info "Using VirusTotal"
    Please note that in order to use VirusTotal lookups you are required to provide your own API key through the `MVT_VT_API_KEY` environment variable. You should also note that VirusTotal enforces strict API usage. Be mindful that MVT might consume your hourly search quota.

## Check the device over Android Debug Bridge
@@ -10,6 +10,11 @@ cd mvt
docker build -t mvt .
```

Optionally, you may need to specify your platform to Docker in order to build successfully (Apple M1)
```bash
docker build --platform amd64 -t mvt .
```

Test if the image was created successfully:

```bash
docs/iocs.md (11 changed lines)
@@ -28,10 +28,19 @@ The `--iocs` option can be invoked multiple times to let MVT import multiple STI
mvt-ios check-backup --iocs ~/iocs/malware1.stix --iocs ~/iocs/malware2.stix2 /path/to/backup
```

It is also possible to load STIX2 files automatically from the environment variable `MVT_STIX2`:

```bash
export MVT_STIX2="/home/user/IOC1.stix2:/home/user/IOC2.stix2"
```

## Known repositories of STIX2 IOCs

- The [Amnesty International investigations repository](https://github.com/AmnestyTech/investigations) contains STIX-formatted IOCs for:
    - [Pegasus](https://en.wikipedia.org/wiki/Pegasus_(spyware)) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-07-18_nso/pegasus.stix2))
- [This repository](https://github.com/Te-k/stalkerware-indicators) contains IOCs for Android stalkerware including [a STIX MVT-compatible file](https://github.com/Te-k/stalkerware-indicators/blob/master/stalkerware.stix2).
    - [Predator from Cytrox](https://citizenlab.ca/2021/12/pegasus-vs-predator-dissidents-doubly-infected-iphone-reveals-cytrox-mercenary-spyware/) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-12-16_cytrox/cytrox.stix2))
- [This repository](https://github.com/Te-k/stalkerware-indicators) contains IOCs for Android stalkerware including [a STIX MVT-compatible file](https://raw.githubusercontent.com/Te-k/stalkerware-indicators/master/generated/stalkerware.stix2).

You can automaticallly download the latest public indicator files with the command `mvt-ios download-iocs` or `mvt-android download-iocs`. These commands download the list of indicators listed [here](https://github.com/mvt-project/mvt/blob/main/public_indicators.json) and store them in the [appdir](https://pypi.org/project/appdirs/) folder. They are then loaded automatically by MVT.

Please [open an issue](https://github.com/mvt-project/mvt/issues/) to suggest new sources of STIX-formatted IOCs.
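As a quick illustration of the `MVT_STIX2` convention documented above (a colon-separated list of STIX2 file paths), the hedged sketch below only shows how such a variable can be collected and validated; it does not reproduce MVT's own indicator loader.

```python
# Minimal sketch (not MVT code): collect STIX2 paths from the MVT_STIX2 environment variable.
# The colon-separated format matches the example in the documentation above.
import os


def stix2_paths_from_env() -> list:
    paths = []
    for path in os.environ.get("MVT_STIX2", "").split(":"):
        path = path.strip()
        if not path:
            continue
        if os.path.isfile(path):
            paths.append(path)
        else:
            print(f"Warning: MVT_STIX2 entry does not exist: {path}")
    return paths


if __name__ == "__main__":
    for ioc_file in stix2_paths_from_env():
        print("Would load indicators from:", ioc_file)
```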
@@ -4,9 +4,21 @@ In this page you can find a (reasonably) up-to-date breakdown of the files creat

## Records extracted by `check-fs` or `check-backup`

### `analytics.json`

!!! info "Availability"
    Backup (if encrypted): :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Analytics` module. The module extracts records from the plists inside the SQLite databases located at *private/var/Keychains/Analytics/\*.db*, which contain various analytics information regarding networking, certificate-pinning, TLS, etc. failures.

If indicators are provided through the command-line, processes and domains are checked against all fields of the plist. Any matches are stored in *analytics_detected.json*.

---

### `backup_info.json`

!!! info "Availabiliy"
!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-close:
@@ -56,7 +68,7 @@ If indicators are provided through the command-line, they are checked against bo

This JSON file is created by mvt-ios' `ChromeHistory` module. The module extracts records from a SQLite database located at */private/var/mobile/Containers/Data/Application/\*/Library/Application Support/Google/Chrome/Default/History*, which contains a history of URL visits.

If indicators a provided through the command-line, they are checked against the visited URL. Any matches are stored in *chrome_history_detected.json*.
If indicators are provided through the command-line, they are checked against the visited URL. Any matches are stored in *chrome_history_detected.json*.

---
@@ -68,6 +80,8 @@ If indicators a provided through the command-line, they are checked against the

This JSON file is created by mvt-ios' `ConfigurationProfiles` module. The module extracts details about iOS configuration profiles that have been installed on the device. These should include both default iOS as well as third-party profiles.

If indicators are provided through the command-line, they are checked against the configuration profile UUID to identify any known malicious profiles. Any matches are stored in *configuration_profiles_detected.json*.

---

### `contacts.json`
@@ -100,7 +114,7 @@ If indicators are provided through the command-line, they are checked against bo

This JSON file is created by mvt-ios' `FirefoxHistory` module. The module extracts records from a SQLite database located at */private/var/mobile/profile.profile/browser.db*, which contains a history of URL visits.

If indicators a provided through the command-line, they are checked against the visited URL. Any matches are stored in *firefox_history_detected.json*.
If indicators are provided through the command-line, they are checked against the visited URL. Any matches are stored in *firefox_history_detected.json*.

---
@@ -116,6 +130,16 @@ Starting from iOS 14.7.0, this file is empty or absent.

---

### `shortcuts.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Shortcuts` module. The module extracts records from an SQLite database located at */private/var/mobile/Library/Shortcuts/Shortcuts.sqlite*, which contains records about the Shortcuts application. Shortcuts are a built-in iOS feature which allows users to automation certain actions on their device. In some cases the legitimate Shortcuts app may be abused by spyware to maintain persistence on an infected devices.

---

### `interaction_c.json`

!!! info "Availability"
@@ -166,7 +190,7 @@ If indicators are provided through the command-line, they are checked against th
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Datausage` module. The module extracts records from a SQLite database located */private/var/wireless/Library/Databases/DataUsage.sqlite*, which contains a history of data usage by processes running on the system. Besides the network statistics, these records are particularly important because they might show traces of malicious process executions and the relevant timeframe. In particular, processes which do not have a valid bundle ID might require particular attention.
This JSON file is created by mvt-ios' `Datausage` module. The module extracts records from a SQLite database located */private/var/wireless/Library/Databases/DataUsage.sqlite*, which contains a history of network data usage by processes running on the system. It does not log network traffic through WiFi (the fields `WIFI_IN` and `WIFI_OUT` are always empty), and the `WWAN_IN` and `WWAN_OUT` fields are stored in bytes. Besides the network statistics, these records are particularly important because they might show traces of malicious process executions and the relevant timeframe. In particular, processes which do not have a valid bundle ID might require particular attention.

If indicators are provided through the command-line, they are checked against the process names. Any matches are stored in *datausage_detected.json*. If running on a full filesystem dump and if the `--fast` flag was not enabled by command-line, mvt-ios will highlight processes which look suspicious and check the presence of a binary file of the same name in the dump.
@@ -202,7 +226,7 @@ This JSON file is created by mvt-ios' `ProfileEvents` module. The module extract

This JSON file is created by mvt-ios' `SafariBrowserState` module. The module extracts records from the SQLite databases located at */private/var/mobile/Library/Safari/BrowserState.db* or */private/var/mobile/Containers/Data/Application/\*/Library/Safari/BrowserState.db*, which contain records of opened tabs.

If indicators a provided through the command-line, they are checked against the visited URL. Any matches are stored in *safari_browser_state_detected.json*.
If indicators are provided through the command-line, they are checked against the visited URL. Any matches are stored in *safari_browser_state_detected.json*.

---
@@ -236,7 +260,7 @@ If indicators are provided through the command-line, they are checked against th
    Backup (if encrypted): :material-close:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `ShutdownLog` module. The module extracts records from the shutdown log located at *private/var/db/diagnostics/shutdown.log*. When shutting down an iPhone, a SIGTERM will be sent to all processes runnning. The `shutdown.log` file will log any process (with its pid and path) that did not shut down after the SIGTERM was sent.
This JSON file is created by mvt-ios' `ShutdownLog` module. The module extracts records from the shutdown log located at *private/var/db/diagnostics/shutdown.log*. When shutting down an iPhone, a SIGTERM will be sent to all processes runnning. The `shutdown.log` file will log any process (with its pid and path) that did not shut down after the SIGTERM was sent.

If indicators are provided through the command-line, they are checked against the paths. Any matches are stored in *shutdown_log_detected.json*.
@@ -1,7 +1,7 @@
site_name: Mobile Verification Toolkit
repo_url: https://github.com/mvt-project/mvt
edit_uri: edit/main/docs/
copyright: Copyright © 2021 MVT Project Developers
copyright: Copyright © 2021-2022 MVT Project Developers
site_description: Mobile Verification Toolkit Documentation
markdown_extensions:
  - attr_list
@@ -1,4 +1,4 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
@@ -1,5 +1,5 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
@@ -1,24 +1,28 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
import os

import click
from rich.logging import RichHandler

from mvt.common.help import *
from mvt.common.indicators import Indicators, IndicatorsFileBadFormat
from mvt.common.cmd_check_iocs import CmdCheckIOCS
from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_IOC,
                             HELP_MSG_LIST_MODULES, HELP_MSG_MODULE,
                             HELP_MSG_OUTPUT, HELP_MSG_SERIAL)
from mvt.common.logo import logo
from mvt.common.module import run_module, save_timeline
from mvt.common.updates import IndicatorsUpdates

from .download_apks import DownloadAPKs
from .lookups.koodous import koodous_lookup
from .lookups.virustotal import virustotal_lookup
from .cmd_check_adb import CmdAndroidCheckADB
from .cmd_check_backup import CmdAndroidCheckBackup
from .cmd_check_bugreport import CmdAndroidCheckBugreport
from .cmd_download_apks import DownloadAPKs
from .modules.adb import ADB_MODULES
from .modules.adb.packages import Packages
from .modules.backup import BACKUP_MODULES
from .modules.bugreport import BUGREPORT_MODULES

# Setup logging using Rich.
LOG_FORMAT = "[%(name)s] %(message)s"
@@ -26,6 +30,7 @@ logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
    RichHandler(show_path=False, log_time_format="%X")])
log = logging.getLogger(__name__)


#==============================================================================
# Main
#==============================================================================
@@ -43,21 +48,19 @@ def version():


#==============================================================================
# Download APKs
# Command: download-apks
#==============================================================================
@cli.command("download-apks", help="Download all or non-safelisted installed APKs installed on the device")
@cli.command("download-apks", help="Download all or only non-system installed APKs")
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
@click.option("--all-apks", "-a", is_flag=True,
              help="Extract all packages installed on the phone, including system packages")
@click.option("--virustotal", "-v", is_flag=True, help="Check packages on VirusTotal")
@click.option("--koodous", "-k", is_flag=True, help="Check packages on Koodous")
@click.option("--all-checks", "-A", is_flag=True, help="Run all available checks")
@click.option("--output", "-o", type=click.Path(exists=False),
              help="Specify a path to a folder where you want to store the APKs")
@click.option("--from-file", "-f", type=click.Path(exists=True),
              help="Instead of acquiring from phone, load an existing packages.json file for lookups (mainly for debug purposes)")
@click.pass_context
def download_apks(ctx, all_apks, virustotal, koodous, all_checks, output, from_file, serial):
def download_apks(ctx, all_apks, virustotal, output, from_file, serial):
    try:
        if from_file:
            download = DownloadAPKs.from_json(from_file)
@@ -67,36 +70,32 @@ def download_apks(ctx, all_apks, virustotal, koodous, all_checks, output, from_f
            log.critical("You need to specify an output folder with --output!")
            ctx.exit(1)

        if not os.path.exists(output):
            try:
                os.makedirs(output)
            except Exception as e:
                log.critical("Unable to create output folder %s: %s", output, e)
                ctx.exit(1)

        download = DownloadAPKs(output_folder=output, all_apks=all_apks,
                                log=logging.getLogger(DownloadAPKs.__module__))
        download = DownloadAPKs(results_path=output, all_apks=all_apks)
        if serial:
            download.serial = serial
        download.run()

        packages = download.packages
        packages_to_lookup = []
        if all_apks:
            packages_to_lookup = download.packages
        else:
            for package in download.packages:
                if not package.get("system", False):
                    packages_to_lookup.append(package)

        if len(packages) == 0:
            return
        if len(packages_to_lookup) == 0:
            return

        if virustotal or all_checks:
            virustotal_lookup(packages)

        if koodous or all_checks:
            koodous_lookup(packages)
        if virustotal:
            m = Packages()
            m.check_virustotal(packages_to_lookup)
    except KeyboardInterrupt:
        print("")
        ctx.exit(1)


#==============================================================================
# Checks through ADB
# Command: check-adb
#==============================================================================
@cli.command("check-adb", help="Check an Android device over adb")
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
@@ -104,106 +103,108 @@ def download_apks(ctx, all_apks, virustotal, koodous, all_checks, output, from_f
              default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False),
              help=HELP_MSG_OUTPUT)
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.pass_context
def check_adb(ctx, iocs, output, list_modules, module, serial):
    if list_modules:
        log.info("Following is the list of available check-adb modules:")
        for adb_module in ADB_MODULES:
            log.info(" - %s", adb_module.__name__)
def check_adb(ctx, serial, iocs, output, fast, list_modules, module):
    cmd = CmdAndroidCheckADB(results_path=output, ioc_files=iocs,
                             module_name=module, serial=serial, fast_mode=fast)

    if list_modules:
        cmd.list_modules()
        return

    log.info("Checking Android through adb bridge")
    log.info("Checking Android device over debug bridge")

    if output and not os.path.exists(output):
        try:
            os.makedirs(output)
        except Exception as e:
            log.critical("Unable to create output folder %s: %s", output, e)
            ctx.exit(1)
    cmd.run()

    indicators = Indicators(log=log)
    for ioc_path in iocs:
        try:
            indicators.parse_stix2(ioc_path)
        except IndicatorsFileBadFormat as e:
            log.critical(e)
            ctx.exit(1)
    log.info("Loaded a total of %d indicators", indicators.ioc_count)

    timeline = []
    timeline_detected = []
    for adb_module in ADB_MODULES:
        if module and adb_module.__name__ != module:
            continue

        m = adb_module(output_folder=output, log=logging.getLogger(adb_module.__module__))
        if serial:
            m.serial = serial

        if iocs:
            indicators.log = m.log
            m.indicators = indicators

        run_module(m)
        timeline.extend(m.timeline)
        timeline_detected.extend(m.timeline_detected)

    if output:
        if len(timeline) > 0:
            save_timeline(timeline, os.path.join(output, "timeline.csv"))
        if len(timeline_detected) > 0:
            save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))
    if len(cmd.timeline_detected) > 0:
        log.warning("The analysis of the Android device produced %d detections!",
                    len(cmd.timeline_detected))


#==============================================================================
# Check ADB backup
# Command: check-bugreport
#==============================================================================
@cli.command("check-bugreport", help="Check an Android Bug Report")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
              default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.argument("BUGREPORT_PATH", type=click.Path(exists=True))
@click.pass_context
def check_bugreport(ctx, iocs, output, list_modules, module, bugreport_path):
    cmd = CmdAndroidCheckBugreport(target_path=bugreport_path, results_path=output,
                                   ioc_files=iocs, module_name=module)

    if list_modules:
        cmd.list_modules()
        return

    log.info("Checking Android bug report at path: %s", bugreport_path)

    cmd.run()

    if len(cmd.timeline_detected) > 0:
        log.warning("The analysis of the Android bug report produced %d detections!",
                    len(cmd.timeline_detected))


#==============================================================================
# Command: check-backup
#==============================================================================
@cli.command("check-backup", help="Check an Android Backup")
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
              default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
@click.pass_context
def check_backup(ctx, iocs, output, backup_path, serial):
    log.info("Checking ADB backup located at: %s", backup_path)
def check_backup(ctx, serial, iocs, output, list_modules, backup_path):
    cmd = CmdAndroidCheckBackup(target_path=backup_path, results_path=output,
                                ioc_files=iocs)

    if output and not os.path.exists(output):
        try:
            os.makedirs(output)
        except Exception as e:
            log.critical("Unable to create output folder %s: %s", output, e)
            ctx.exit(1)
    if list_modules:
        cmd.list_modules()
        return

    indicators = Indicators(log=log)
    for ioc_path in iocs:
        try:
            indicators.parse_stix2(ioc_path)
        except IndicatorsFileBadFormat as e:
            log.critical(e)
            ctx.exit(1)
    log.info("Loaded a total of %d indicators", indicators.ioc_count)
    log.info("Checking Android backup at path: %s", backup_path)

    if os.path.isfile(backup_path):
        log.critical("The path you specified is a not a folder!")
    cmd.run()

        if os.path.basename(backup_path) == "backup.ab":
            log.info("You can use ABE (https://github.com/nelenkov/android-backup-extractor) " \
                     "to extract 'backup.ab' files!")
        ctx.exit(1)
    if len(cmd.timeline_detected) > 0:
        log.warning("The analysis of the Android backup produced %d detections!",
                    len(cmd.timeline_detected))

    for module in BACKUP_MODULES:
        m = module(base_folder=backup_path, output_folder=output,
                   log=logging.getLogger(module.__module__))

        if serial:
            m.serial = serial
#==============================================================================
# Command: check-iocs
#==============================================================================
@cli.command("check-iocs", help="Compare stored JSON results to provided indicators")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
              default=[], help=HELP_MSG_IOC)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.argument("FOLDER", type=click.Path(exists=True))
@click.pass_context
def check_iocs(ctx, iocs, list_modules, module, folder):
    cmd = CmdCheckIOCS(target_path=folder, ioc_files=iocs, module_name=module)
    cmd.modules = BACKUP_MODULES + ADB_MODULES + BUGREPORT_MODULES

        if iocs:
            indicators.log = m.log
            m.indicators = indicators
    if list_modules:
        cmd.list_modules()
        return

        run_module(m)
    cmd.run()


#==============================================================================
# Command: download-iocs
#==============================================================================
@cli.command("download-iocs", help="Download public STIX2 indicators")
def download_indicators():
    ioc_updates = IndicatorsUpdates()
    ioc_updates.update()
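Because the commands above are plain `click` commands attached to the `cli` group, they can be exercised without a device using click's built-in test runner. The snippet below is an illustration only (it is not part of MVT's test suite) and assumes the group object is importable as `mvt.android.cli.cli`, as the decorators above suggest.

```python
# Illustrative only: drive the click commands defined above with click's test runner.
from click.testing import CliRunner

from mvt.android.cli import cli

runner = CliRunner()

# --list-modules prints the available modules and returns without touching a device.
result = runner.invoke(cli, ["check-adb", "--list-modules"])
print(result.output)

# check-backup requires an existing path argument even when only listing modules.
result = runner.invoke(cli, ["check-backup", "--list-modules", "."])
print(result.output)
```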
mvt/android/cmd_check_adb.py (new file, 25 lines)
@@ -0,0 +1,25 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

from mvt.common.command import Command

from .modules.adb import ADB_MODULES

log = logging.getLogger(__name__)


class CmdAndroidCheckADB(Command):

    name = "check-adb"
    modules = ADB_MODULES

    def __init__(self, target_path: str = None, results_path: str = None,
                 ioc_files: list = [], module_name: str = None, serial: str = None,
                 fast_mode: bool = False):
        super().__init__(target_path=target_path, results_path=results_path,
                         ioc_files=ioc_files, module_name=module_name,
                         serial=serial, fast_mode=fast_mode, log=log)
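`CmdAndroidCheckADB` is intentionally thin: everything generic lives in `mvt.common.command.Command`, whose implementation is not part of this diff. Purely as a mental model (the real base class will differ), a `Command`-style class supporting the pattern used by these subclasses could look roughly like this:

```python
# Hypothetical sketch of a Command-style base class, for orientation only.
# The real mvt.common.command.Command is not shown in this diff and will differ.
import logging


class Command:
    name = ""
    modules = []

    def __init__(self, target_path=None, results_path=None, ioc_files=None,
                 module_name=None, serial=None, fast_mode=False, log=None):
        self.target_path = target_path
        self.results_path = results_path
        self.ioc_files = ioc_files or []
        self.module_name = module_name
        self.serial = serial
        self.fast_mode = fast_mode
        self.log = log or logging.getLogger(__name__)
        self.timeline_detected = []

    def list_modules(self) -> None:
        self.log.info("Following is the list of available %s modules:", self.name)
        for module in self.modules:
            self.log.info(" - %s", module.__name__)

    def init(self) -> None:
        """Subclasses prepare their input here (e.g. open an archive)."""

    def module_init(self, module) -> None:
        """Subclasses hand their input to each module here."""

    def run(self) -> None:
        self.init()
        for module_class in self.modules:
            if self.module_name and module_class.__name__ != self.module_name:
                continue
            # The module constructor arguments here are assumptions for illustration.
            module = module_class(results_path=self.results_path, log=self.log)
            self.module_init(module)
            module.run()
            self.timeline_detected.extend(getattr(module, "timeline_detected", []))
```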
mvt/android/cmd_check_backup.py (new file, 85 lines)
@@ -0,0 +1,85 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import io
import logging
import os
import sys
import tarfile
from pathlib import Path
from typing import Callable

from rich.prompt import Prompt

from mvt.android.parsers.backup import (AndroidBackupParsingError,
                                        InvalidBackupPassword, parse_ab_header,
                                        parse_backup_file)
from mvt.common.command import Command

from .modules.backup import BACKUP_MODULES

log = logging.getLogger(__name__)


class CmdAndroidCheckBackup(Command):

    name = "check-backup"
    modules = BACKUP_MODULES

    def __init__(self, target_path: str = None, results_path: str = None,
                 ioc_files: list = [], module_name: str = None, serial: str = None,
                 fast_mode: bool = False):
        super().__init__(target_path=target_path, results_path=results_path,
                         ioc_files=ioc_files, module_name=module_name,
                         serial=serial, fast_mode=fast_mode, log=log)

        self.backup_type = None
        self.backup_archive = None
        self.backup_files = []

    def init(self) -> None:
        if os.path.isfile(self.target_path):
            self.backup_type = "ab"
            with open(self.target_path, "rb") as handle:
                data = handle.read()

            header = parse_ab_header(data)
            if not header["backup"]:
                log.critical("Invalid backup format, file should be in .ab format")
                sys.exit(1)

            password = None
            if header["encryption"] != "none":
                password = Prompt.ask("Enter backup password", password=True)
            try:
                tardata = parse_backup_file(data, password=password)
            except InvalidBackupPassword:
                log.critical("Invalid backup password")
                sys.exit(1)
            except AndroidBackupParsingError as e:
                log.critical("Impossible to parse this backup file: %s", e)
                log.critical("Please use Android Backup Extractor (ABE) instead")
                sys.exit(1)

            dbytes = io.BytesIO(tardata)
            self.backup_archive = tarfile.open(fileobj=dbytes)
            for member in self.backup_archive:
                self.backup_files.append(member.name)

        elif os.path.isdir(self.target_path):
            self.backup_type = "folder"
            self.target_path = Path(self.target_path).absolute().as_posix()
            for root, subdirs, subfiles in os.walk(os.path.abspath(self.target_path)):
                for fname in subfiles:
                    self.backup_files.append(os.path.relpath(os.path.join(root, fname), self.target_path))
        else:
            log.critical("Invalid backup path, path should be a folder or an Android Backup (.ab) file")
            sys.exit(1)

    def module_init(self, module: Callable) -> None:
        if self.backup_type == "folder":
            module.from_folder(self.target_path, self.backup_files)
        else:
            module.from_ab(self.target_path, self.backup_archive, self.backup_files)
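Mirroring how `cli.py` wires this class up in the hunks above, a minimal programmatic invocation would look roughly like the following; the paths and IOC file are placeholders, and the password prompt for encrypted backups is handled during the run itself.

```python
# Usage sketch for CmdAndroidCheckBackup, following the wiring shown in mvt/android/cli.py.
# Paths and the IOC file name are placeholders.
import logging

from mvt.android.cmd_check_backup import CmdAndroidCheckBackup

logging.basicConfig(level="INFO")

cmd = CmdAndroidCheckBackup(target_path="backup.ab",
                            results_path="results/",
                            ioc_files=["pegasus.stix2"])
cmd.run()

if len(cmd.timeline_detected) > 0:
    print(f"The analysis of the Android backup produced {len(cmd.timeline_detected)} detections!")
```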
mvt/android/cmd_check_bugreport.py (new file, 52 lines)
@@ -0,0 +1,52 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
import os
from pathlib import Path
from typing import Callable
from zipfile import ZipFile

from mvt.common.command import Command

from .modules.bugreport import BUGREPORT_MODULES

log = logging.getLogger(__name__)


class CmdAndroidCheckBugreport(Command):

    name = "check-bugreport"
    modules = BUGREPORT_MODULES

    def __init__(self, target_path: str = None, results_path: str = None,
                 ioc_files: list = [], module_name: str = None, serial: str = None,
                 fast_mode: bool = False):
        super().__init__(target_path=target_path, results_path=results_path,
                         ioc_files=ioc_files, module_name=module_name,
                         serial=serial, fast_mode=fast_mode, log=log)

        self.bugreport_format = None
        self.bugreport_archive = None
        self.bugreport_files = []

    def init(self) -> None:
        if os.path.isfile(self.target_path):
            self.bugreport_format = "zip"
            self.bugreport_archive = ZipFile(self.target_path)
            for file_name in self.bugreport_archive.namelist():
                self.bugreport_files.append(file_name)
        elif os.path.isdir(self.target_path):
            self.bugreport_format = "dir"
            parent_path = Path(self.target_path).absolute().as_posix()
            for root, subdirs, subfiles in os.walk(os.path.abspath(self.target_path)):
                for file_name in subfiles:
                    self.bugreport_files.append(os.path.relpath(os.path.join(root, file_name), parent_path))

    def module_init(self, module: Callable) -> None:
        if self.bugreport_format == "zip":
            module.from_zip(self.bugreport_archive, self.bugreport_files)
        else:
            module.from_folder(self.target_path, self.bugreport_files)
@@ -1,14 +1,14 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import json
import logging
import os
from typing import Callable

import pkg_resources
from tqdm import tqdm
from rich.progress import track

from mvt.common.module import InsufficientPrivileges
@@ -17,17 +17,6 @@ from .modules.adb.packages import Packages

log = logging.getLogger(__name__)

# TODO: Would be better to replace tqdm with rich.progress to reduce
# the number of dependencies. Need to investigate whether
# it's possible to have a similar callback system.
class PullProgress(tqdm):
    """PullProgress is a tqdm update system for APK downloads."""

    def update_to(self, file_name, current, total):
        if total is not None:
            self.total = total
        self.update(current - self.n)


class DownloadAPKs(AndroidExtraction):
    """DownloadAPKs is the main class operating the download of APKs
@@ -36,32 +25,32 @@ class DownloadAPKs(AndroidExtraction):

    """

    def __init__(self, output_folder=None, all_apks=False, log=None,
                 packages=None):
    def __init__(self, results_path: str = "", all_apks: bool = False,
                 packages: list = []):
        """Initialize module.
        :param output_folder: Path to the folder where data should be stored
        :param results_path: Path to the folder where data should be stored
        :param all_apks: Boolean indicating whether to download all packages
                         or filter known-goods
        :param packages: Provided list of packages, typically for JSON checks
        """
        super().__init__(output_folder=output_folder, log=log)
        super().__init__(results_path=results_path, log=log)

        self.packages = packages
        self.all_apks = all_apks
        self.output_folder_apk = None
        self.results_path_apks = None

    @classmethod
    def from_json(cls, json_path):
    def from_json(cls, json_path: str) -> Callable:
        """Initialize this class from an existing apks.json file.

        :param json_path: Path to the apks.json file to parse.

        """
        with open(json_path, "r") as handle:
        with open(json_path, "r", encoding="utf-8") as handle:
            packages = json.load(handle)
        return cls(packages=packages)

    def pull_package_file(self, package_name, remote_path):
    def pull_package_file(self, package_name: str, remote_path: str) -> None:
        """Pull files related to specific package from the device.

        :param package_name: Name of the package to download
@@ -75,7 +64,7 @@ class DownloadAPKs(AndroidExtraction):
        if "==/" in remote_path:
            file_name = "_" + remote_path.split("==/")[1].replace(".apk", "")

        local_path = os.path.join(self.output_folder_apk,
        local_path = os.path.join(self.results_path_apks,
                                  f"{package_name}{file_name}.apk")
        name_counter = 0
        while True:
@@ -83,17 +72,14 @@
                break

            name_counter += 1
            local_path = os.path.join(self.output_folder_apk,
            local_path = os.path.join(self.results_path_apks,
                                      f"{package_name}{file_name}_{name_counter}.apk")

        try:
            with PullProgress(unit='B', unit_divisor=1024, unit_scale=True,
                              miniters=1) as pp:
                self._adb_download(remote_path, local_path,
                                   progress_callback=pp.update_to)
            self._adb_download(remote_path, local_path)
        except InsufficientPrivileges:
            log.warn("Unable to pull package file from %s: insufficient privileges, it might be a system app",
                     remote_path)
            log.error("Unable to pull package file from %s: insufficient privileges, it might be a system app",
                      remote_path)
            self._adb_reconnect()
            return None
        except Exception as e:
@@ -104,26 +90,24 @@

        return local_path

    def get_packages(self):
    def get_packages(self) -> None:
        """Use the Packages adb module to retrieve the list of packages.
        We reuse the same extraction logic to then download the APKs.


        """
        self.log.info("Retrieving list of installed packages...")

        m = Packages()
        m.log = self.log
        m.serial = self.serial
        m.run()

        self.packages = m.results

    def pull_packages(self):
        """Download all files of all selected packages from the device."""
        log.info("Starting extraction of installed APKs at folder %s", self.output_folder)

        if not os.path.exists(self.output_folder):
            os.mkdir(self.output_folder)
    def pull_packages(self) -> None:
        """Download all files of all selected packages from the device.
        """
        log.info("Starting extraction of installed APKs at folder %s",
                 self.results_path)

        # If the user provided the flag --all-apks we select all packages.
        packages_selection = []
@@ -137,8 +121,8 @@
                if not package.get("system", False):
                    packages_selection.append(package)

        log.info("Selected only %d packages which are not marked as system",
                 len(packages_selection))
        log.info("Selected only %d packages which are not marked as \"system\"",
                 len(packages_selection))

        if len(packages_selection) == 0:
            log.info("No packages were selected for download")
@@ -146,15 +130,15 @@ class DownloadAPKs(AndroidExtraction):
|
||||
|
||||
log.info("Downloading packages from device. This might take some time ...")
|
||||
|
||||
self.output_folder_apk = os.path.join(self.output_folder, "apks")
|
||||
if not os.path.exists(self.output_folder_apk):
|
||||
os.mkdir(self.output_folder_apk)
|
||||
self.results_path_apks = os.path.join(self.results_path, "apks")
|
||||
if not os.path.exists(self.results_path_apks):
|
||||
os.makedirs(self.results_path_apks, exist_ok=True)
|
||||
|
||||
counter = 0
|
||||
for package in packages_selection:
|
||||
counter += 1
|
||||
for i in track(range(len(packages_selection)),
|
||||
description=f"Downloading {len(packages_selection)} packages..."):
|
||||
package = packages_selection[i]
|
||||
|
||||
log.info("[%d/%d] Package: %s", counter, len(packages_selection),
|
||||
log.info("[%d/%d] Package: %s", i, len(packages_selection),
|
||||
package["package_name"])
|
||||
|
||||
# Sometimes the package path contains multiple lines for multiple apks.
|
||||
@@ -170,14 +154,12 @@ class DownloadAPKs(AndroidExtraction):
|
||||
|
||||
log.info("Download of selected packages completed")
|
||||
|
||||
def save_json(self):
|
||||
"""Save the results to the package.json file."""
|
||||
json_path = os.path.join(self.output_folder, "apks.json")
|
||||
with open(json_path, "w") as handle:
|
||||
def save_json(self) -> None:
|
||||
json_path = os.path.join(self.results_path, "apks.json")
|
||||
with open(json_path, "w", encoding="utf-8") as handle:
|
||||
json.dump(self.packages, handle, indent=4)
|
||||
|
||||
def run(self):
|
||||
"""Run all steps of fetch-apk."""
|
||||
def run(self) -> None:
|
||||
self.get_packages()
|
||||
self._adb_connect()
|
||||
self.pull_packages()
|
||||
@@ -1,10 +0,0 @@
|
||||
su
|
||||
busybox
|
||||
supersu
|
||||
Superuser.apk
|
||||
KingoUser.apk
|
||||
SuperSu.apk
|
||||
magisk
|
||||
magiskhide
|
||||
magiskinit
|
||||
magiskpolicy
|
||||
@@ -1,25 +0,0 @@
|
||||
com.noshufou.android.su
|
||||
com.noshufou.android.su.elite
|
||||
eu.chainfire.supersu
|
||||
com.koushikdutta.superuser
|
||||
com.thirdparty.superuser
|
||||
com.yellowes.su
|
||||
com.koushikdutta.rommanager
|
||||
com.koushikdutta.rommanager.license
|
||||
com.dimonvideo.luckypatcher
|
||||
com.chelpus.lackypatch
|
||||
com.ramdroid.appquarantine
|
||||
com.ramdroid.appquarantinepro
|
||||
com.devadvance.rootcloak
|
||||
com.devadvance.rootcloakplus
|
||||
de.robv.android.xposed.installer
|
||||
com.saurik.substrate
|
||||
com.zachspong.temprootremovejb
|
||||
com.amphoras.hidemyroot
|
||||
com.amphoras.hidemyrootadfree
|
||||
com.formyhm.hiderootPremium
|
||||
com.formyhm.hideroot
|
||||
me.phh.superuser
|
||||
eu.chainfire.supersu.pro
|
||||
com.kingouser.com
|
||||
com.topjohnwu.magisk
|
||||
@@ -1,4 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
@@ -1,57 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
import requests
|
||||
from rich.console import Console
|
||||
from rich.progress import track
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def koodous_lookup(packages):
|
||||
log.info("Looking up all extracted files on Koodous (www.koodous.com)")
|
||||
log.info("This might take a while...")
|
||||
|
||||
table = Table(title="Koodous Packages Detections")
|
||||
table.add_column("Package name")
|
||||
table.add_column("File name")
|
||||
table.add_column("Trusted")
|
||||
table.add_column("Detected")
|
||||
table.add_column("Rating")
|
||||
|
||||
total_packages = len(packages)
|
||||
for i in track(range(total_packages), description=f"Looking up {total_packages} packages..."):
|
||||
package = packages[i]
|
||||
for file in package.get("files", []):
|
||||
url = f"https://api.koodous.com/apks/{file['sha256']}"
|
||||
res = requests.get(url)
|
||||
report = res.json()
|
||||
|
||||
row = [package["package_name"], file["path"]]
|
||||
|
||||
if "package_name" in report:
|
||||
trusted = "no"
|
||||
if report["trusted"]:
|
||||
trusted = Text("yes", "green bold")
|
||||
|
||||
detected = "no"
|
||||
if report["detected"]:
|
||||
detected = Text("yes", "red bold")
|
||||
|
||||
rating = "0"
|
||||
if int(report["rating"]) < 0:
|
||||
rating = Text(str(report["rating"]), "red bold")
|
||||
|
||||
row.extend([trusted, detected, rating])
|
||||
else:
|
||||
row.extend(["n/a", "n/a", "n/a"])
|
||||
|
||||
table.add_row(*row)
|
||||
|
||||
console = Console()
|
||||
console.print(table)
|
||||
@@ -1,93 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
import requests
|
||||
from rich.console import Console
|
||||
from rich.progress import track
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
def get_virustotal_report(hashes):
|
||||
apikey = "233f22e200ca5822bd91103043ccac138b910db79f29af5616a9afe8b6f215ad"
|
||||
url = f"https://www.virustotal.com/partners/sysinternals/file-reports?apikey={apikey}"
|
||||
|
||||
items = []
|
||||
for sha256 in hashes:
|
||||
items.append({
|
||||
"autostart_location": "",
|
||||
"autostart_entry": "",
|
||||
"hash": sha256,
|
||||
"local_name": "",
|
||||
"creation_datetime": "",
|
||||
})
|
||||
headers = {"User-Agent": "VirusTotal", "Content-Type": "application/json"}
|
||||
res = requests.post(url, headers=headers, json=items)
|
||||
|
||||
if res.status_code == 200:
|
||||
report = res.json()
|
||||
return report["data"]
|
||||
else:
|
||||
log.error("Unexpected response from VirusTotal: %s", res.status_code)
|
||||
return None
|
||||
|
||||
def virustotal_lookup(packages):
|
||||
log.info("Looking up all extracted files on VirusTotal (www.virustotal.com)")
|
||||
|
||||
unique_hashes = []
|
||||
for package in packages:
|
||||
for file in package.get("files", []):
|
||||
if file["sha256"] not in unique_hashes:
|
||||
unique_hashes.append(file["sha256"])
|
||||
|
||||
total_unique_hashes = len(unique_hashes)
|
||||
|
||||
detections = {}
|
||||
def virustotal_query(batch):
|
||||
report = get_virustotal_report(batch)
|
||||
if not report:
|
||||
return
|
||||
|
||||
for entry in report:
|
||||
if entry["hash"] not in detections and entry["found"] is True:
|
||||
detections[entry["hash"]] = entry["detection_ratio"]
|
||||
|
||||
batch = []
|
||||
for i in track(range(total_unique_hashes), description=f"Looking up {total_unique_hashes} files..."):
|
||||
file_hash = unique_hashes[i]
|
||||
batch.append(file_hash)
|
||||
if len(batch) == 25:
|
||||
virustotal_query(batch)
|
||||
batch = []
|
||||
|
||||
if batch:
|
||||
virustotal_query(batch)
|
||||
|
||||
table = Table(title="VirusTotal Packages Detections")
|
||||
table.add_column("Package name")
|
||||
table.add_column("File path")
|
||||
table.add_column("Detections")
|
||||
|
||||
for package in packages:
|
||||
for file in package.get("files", []):
|
||||
row = [package["package_name"], file["path"]]
|
||||
|
||||
if file["sha256"] in detections:
|
||||
detection = detections[file["sha256"]]
|
||||
positives = detection.split("/")[0]
|
||||
if int(positives) > 0:
|
||||
row.append(Text(detection, "red bold"))
|
||||
else:
|
||||
row.append(detection)
|
||||
else:
|
||||
row.append("not found")
|
||||
|
||||
table.add_row(*row)
|
||||
|
||||
console = Console()
|
||||
console.print(table)
|
||||
@@ -1,4 +1,4 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -1,23 +1,30 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from .chrome_history import ChromeHistory
|
||||
from .dumpsys_batterystats import DumpsysBatterystats
|
||||
from .dumpsys_accessibility import DumpsysAccessibility
|
||||
from .dumpsys_activities import DumpsysActivities
|
||||
from .dumpsys_appops import DumpsysAppOps
|
||||
from .dumpsys_battery_daily import DumpsysBatteryDaily
|
||||
from .dumpsys_battery_history import DumpsysBatteryHistory
|
||||
from .dumpsys_dbinfo import DumpsysDBInfo
|
||||
from .dumpsys_full import DumpsysFull
|
||||
from .dumpsys_packages import DumpsysPackages
|
||||
from .dumpsys_procstats import DumpsysProcstats
|
||||
from .dumpsys_receivers import DumpsysReceivers
|
||||
from .files import Files
|
||||
from .getprop import Getprop
|
||||
from .logcat import Logcat
|
||||
from .packages import Packages
|
||||
from .processes import Processes
|
||||
from .rootbinaries import RootBinaries
|
||||
from .root_binaries import RootBinaries
|
||||
from .selinux_status import SELinuxStatus
|
||||
from .settings import Settings
|
||||
from .sms import SMS
|
||||
from .whatsapp import Whatsapp
|
||||
|
||||
ADB_MODULES = [ChromeHistory, SMS, Whatsapp, Processes,
|
||||
DumpsysBatterystats, DumpsysProcstats,
|
||||
DumpsysPackages, DumpsysReceivers, DumpsysFull,
|
||||
Packages, RootBinaries, Logcat, Files]
|
||||
ADB_MODULES = [ChromeHistory, SMS, Whatsapp, Processes, Getprop, Settings,
|
||||
SELinuxStatus, DumpsysBatteryHistory, DumpsysBatteryDaily,
|
||||
DumpsysReceivers, DumpsysActivities, DumpsysAccessibility,
|
||||
DumpsysDBInfo, DumpsysFull, DumpsysAppOps, Packages, Logcat,
|
||||
RootBinaries, Files]
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import base64
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
@@ -10,14 +11,18 @@ import string
|
||||
import sys
|
||||
import tempfile
|
||||
import time
|
||||
from typing import Callable
|
||||
|
||||
from adb_shell.adb_device import AdbDeviceTcp, AdbDeviceUsb
|
||||
from adb_shell.auth.keygen import keygen, write_public_keyfile
|
||||
from adb_shell.auth.sign_pythonrsa import PythonRSASigner
|
||||
from adb_shell.exceptions import (AdbCommandFailureException, DeviceAuthError,
|
||||
UsbReadFailedError)
|
||||
UsbDeviceNotFoundError, UsbReadFailedError)
|
||||
from rich.prompt import Prompt
|
||||
from usb1 import USBErrorAccess, USBErrorBusy
|
||||
|
||||
from mvt.android.parsers.backup import (InvalidBackupPassword, parse_ab_header,
|
||||
parse_backup_file)
|
||||
from mvt.common.module import InsufficientPrivileges, MVTModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
@@ -25,20 +30,22 @@ log = logging.getLogger(__name__)
|
||||
ADB_KEY_PATH = os.path.expanduser("~/.android/adbkey")
|
||||
ADB_PUB_KEY_PATH = os.path.expanduser("~/.android/adbkey.pub")
|
||||
|
||||
|
||||
class AndroidExtraction(MVTModule):
|
||||
"""This class provides a base for all Android extraction modules."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.device = None
|
||||
self.serial = None
|
||||
|
||||
@staticmethod
|
||||
def _adb_check_keys():
|
||||
def _adb_check_keys() -> None:
|
||||
"""Make sure Android adb keys exist."""
|
||||
if not os.path.isdir(os.path.dirname(ADB_KEY_PATH)):
|
||||
os.makedirs(os.path.dirname(ADB_KEY_PATH))
|
||||
@@ -49,7 +56,7 @@ class AndroidExtraction(MVTModule):
|
||||
if not os.path.exists(ADB_PUB_KEY_PATH):
|
||||
write_public_keyfile(ADB_KEY_PATH, ADB_PUB_KEY_PATH)
|
||||
|
||||
def _adb_connect(self):
|
||||
def _adb_connect(self) -> None:
|
||||
"""Connect to the device over adb."""
|
||||
self._adb_check_keys()
|
||||
|
||||
@@ -64,7 +71,11 @@ class AndroidExtraction(MVTModule):
|
||||
# If no serial was specified or if the serial does not seem to be
|
||||
# a HOST:PORT definition, we use the USB transport.
|
||||
if not self.serial or ":" not in self.serial:
|
||||
self.device = AdbDeviceUsb(serial=self.serial)
|
||||
try:
|
||||
self.device = AdbDeviceUsb(serial=self.serial)
|
||||
except UsbDeviceNotFoundError:
|
||||
log.critical("No device found. Make sure it is connected and unlocked.")
|
||||
sys.exit(-1)
|
||||
# Otherwise we try to use the TCP transport.
|
||||
else:
|
||||
addr = self.serial.split(":")
|
||||
@@ -89,31 +100,31 @@ class AndroidExtraction(MVTModule):
|
||||
except OSError as e:
|
||||
if e.errno == 113 and self.serial:
|
||||
log.critical("Unable to connect to the device %s: did you specify the correct IP addres?",
|
||||
self.serial)
|
||||
self.serial)
|
||||
sys.exit(-1)
|
||||
else:
|
||||
break
|
||||
|
||||
def _adb_disconnect(self):
|
||||
def _adb_disconnect(self) -> None:
|
||||
"""Close adb connection to the device."""
|
||||
self.device.close()
|
||||
|
||||
def _adb_reconnect(self):
|
||||
def _adb_reconnect(self) -> None:
|
||||
"""Reconnect to device using adb."""
|
||||
log.info("Reconnecting ...")
|
||||
self._adb_disconnect()
|
||||
self._adb_connect()
|
||||
|
||||
def _adb_command(self, command):
|
||||
def _adb_command(self, command: str) -> str:
|
||||
"""Execute an adb shell command.
|
||||
|
||||
:param command: Shell command to execute
|
||||
:returns: Output of command
|
||||
|
||||
"""
|
||||
return self.device.shell(command)
|
||||
return self.device.shell(command, read_timeout_s=200.0)
|
||||
|
||||
def _adb_check_if_root(self):
|
||||
def _adb_check_if_root(self) -> bool:
|
||||
"""Check if we have a `su` binary on the Android device.
|
||||
|
||||
|
||||
@@ -122,7 +133,7 @@ class AndroidExtraction(MVTModule):
|
||||
"""
|
||||
return bool(self._adb_command("command -v su"))
|
||||
|
||||
def _adb_root_or_die(self):
|
||||
def _adb_root_or_die(self) -> None:
|
||||
"""Check if we have a `su` binary, otherwise raise an Exception."""
|
||||
if not self._adb_check_if_root():
|
||||
raise InsufficientPrivileges("This module is optionally available in case the device is already rooted. Do NOT root your own device!")
|
||||
@@ -136,7 +147,7 @@ class AndroidExtraction(MVTModule):
|
||||
"""
|
||||
return self._adb_command(f"su -c {command}")
|
||||
|
||||
def _adb_check_file_exists(self, file):
|
||||
def _adb_check_file_exists(self, file: str) -> bool:
|
||||
"""Verify that a file exists.
|
||||
|
||||
:param file: Path of the file
|
||||
@@ -153,7 +164,9 @@ class AndroidExtraction(MVTModule):
|
||||
|
||||
return bool(self._adb_command_as_root(f"[ ! -f {file} ] || echo 1"))
|
||||
|
||||
def _adb_download(self, remote_path, local_path, progress_callback=None, retry_root=True):
|
||||
def _adb_download(self, remote_path: str, local_path: str,
|
||||
progress_callback: Callable = None,
|
||||
retry_root: bool = True) -> None:
|
||||
"""Download a file form the device.
|
||||
|
||||
:param remote_path: Path to download from the device
|
||||
@@ -170,7 +183,8 @@ class AndroidExtraction(MVTModule):
|
||||
else:
|
||||
raise Exception(f"Unable to download file {remote_path}: {e}")
|
||||
|
||||
def _adb_download_root(self, remote_path, local_path, progress_callback=None):
|
||||
def _adb_download_root(self, remote_path: str, local_path: str,
|
||||
progress_callback: Callable = None) -> None:
|
||||
try:
|
||||
# Check if we have root, if not raise an Exception.
|
||||
self._adb_root_or_die()
|
||||
@@ -198,7 +212,8 @@ class AndroidExtraction(MVTModule):
|
||||
except AdbCommandFailureException as e:
|
||||
raise Exception(f"Unable to download file {remote_path}: {e}")
|
||||
|
||||
def _adb_process_file(self, remote_path, process_routine):
|
||||
def _adb_process_file(self, remote_path: str,
|
||||
process_routine: Callable) -> None:
|
||||
"""Download a local copy of a file which is only accessible as root.
|
||||
This is a wrapper around process_routine.
|
||||
|
||||
@@ -238,6 +253,32 @@ class AndroidExtraction(MVTModule):
|
||||
# Disconnect from the device.
|
||||
self._adb_disconnect()
|
||||
|
||||
def run(self):
|
||||
def _generate_backup(self, package_name: str) -> bytes:
|
||||
self.log.warning("Please check phone and accept Android backup prompt. You may need to set a backup password. \a")
|
||||
|
||||
# TODO: Base64 encoding as temporary fix to avoid byte-mangling over the shell transport...
|
||||
backup_output_b64 = self._adb_command("/system/bin/bu backup -nocompress '{}' | base64".format(
|
||||
package_name))
|
||||
backup_output = base64.b64decode(backup_output_b64)
|
||||
header = parse_ab_header(backup_output)
|
||||
|
||||
if not header["backup"]:
|
||||
self.log.error("Extracting SMS via Android backup failed. No valid backup data found.")
|
||||
return
|
||||
|
||||
if header["encryption"] == "none":
|
||||
return parse_backup_file(backup_output, password=None)
|
||||
|
||||
for password_retry in range(0, 3):
|
||||
backup_password = Prompt.ask("Enter backup password", password=True)
|
||||
try:
|
||||
decrypted_backup_tar = parse_backup_file(backup_output, backup_password)
|
||||
return decrypted_backup_tar
|
||||
except InvalidBackupPassword:
|
||||
self.log.error("You provided the wrong password! Please try again...")
|
||||
|
||||
self.log.warn("All attempts to decrypt backup with password failed!")
|
||||
|
||||
def run(self) -> None:
|
||||
"""Run the main procedure."""
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -16,16 +16,18 @@ log = logging.getLogger(__name__)
|
||||
|
||||
CHROME_HISTORY_PATH = "data/data/com.android.chrome/app_chrome/Default/History"
|
||||
|
||||
|
||||
class ChromeHistory(AndroidExtraction):
|
||||
"""This module extracts records from Android's Chrome browsing history."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
def serialize(self, record: dict) -> None:
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
@@ -33,7 +35,7 @@ class ChromeHistory(AndroidExtraction):
|
||||
"data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, redirect source: {record['redirect_source']})"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
@@ -41,7 +43,7 @@ class ChromeHistory(AndroidExtraction):
|
||||
if self.indicators.check_domain(result["url"]):
|
||||
self.detected.append(result)
|
||||
|
||||
def _parse_db(self, db_path):
|
||||
def _parse_db(self, db_path: str) -> None:
|
||||
"""Parse a Chrome History database file.
|
||||
|
||||
:param db_path: Path to the History database to process.
|
||||
@@ -67,7 +69,7 @@ class ChromeHistory(AndroidExtraction):
|
||||
"url": item[1],
|
||||
"visit_id": item[2],
|
||||
"timestamp": item[3],
|
||||
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix[item[3]]),
|
||||
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(item[3])),
|
||||
"redirect_source": item[4],
|
||||
})
|
||||
|
||||
@@ -76,6 +78,9 @@ class ChromeHistory(AndroidExtraction):
|
||||
|
||||
log.info("Extracted a total of %d history items", len(self.results))
|
||||
|
||||
def run(self):
|
||||
self._adb_process_file(os.path.join("/", CHROME_HISTORY_PATH),
|
||||
self._parse_db)
|
||||
def run(self) -> None:
|
||||
try:
|
||||
self._adb_process_file(os.path.join("/", CHROME_HISTORY_PATH),
|
||||
self._parse_db)
|
||||
except Exception as e:
|
||||
self.log.error(e)
|
||||
|
||||
46
mvt/android/modules/adb/dumpsys_accessibility.py
Normal file
46
mvt/android/modules/adb/dumpsys_accessibility.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_accessibility
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysAccessibility(AndroidExtraction):
|
||||
"""This module extracts stats on accessibility."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys accessibility")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_accessibility(output)
|
||||
|
||||
for result in self.results:
|
||||
log.info("Found installed accessibility service \"%s\"", result.get("service"))
|
||||
|
||||
self.log.info("Identified a total of %d accessibility services", len(self.results))
|
||||
46
mvt/android/modules/adb/dumpsys_activities.py
Normal file
46
mvt/android/modules/adb/dumpsys_activities.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_activity_resolver_table
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysActivities(AndroidExtraction):
|
||||
"""This module extracts details on receivers for risky activities."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, activities in self.results.items():
|
||||
for activity in activities:
|
||||
ioc = self.indicators.check_app_id(activity["package_name"])
|
||||
if ioc:
|
||||
activity["matched_indicator"] = ioc
|
||||
self.detected.append({intent: activity})
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys package")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_activity_resolver_table(output)
|
||||
|
||||
self.log.info("Extracted activities for %d intents", len(self.results))
|
||||
66
mvt/android/modules/adb/dumpsys_appops.py
Normal file
66
mvt/android/modules/adb/dumpsys_appops.py
Normal file
@@ -0,0 +1,66 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers.dumpsys import parse_dumpsys_appops
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysAppOps(AndroidExtraction):
|
||||
"""This module extracts records from App-op Manager."""
|
||||
|
||||
slug = "dumpsys_appops"
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record: dict) -> None:
|
||||
records = []
|
||||
for perm in record["permissions"]:
|
||||
if "entries" not in perm:
|
||||
continue
|
||||
|
||||
for entry in perm["entries"]:
|
||||
if "timestamp" in entry:
|
||||
records.append({
|
||||
"timestamp": entry["timestamp"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": entry["access"],
|
||||
"data": f"{record['package_name']} access to {perm['name']}: {entry['access']}",
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
for result in self.results:
|
||||
if self.indicators:
|
||||
ioc = self.indicators.check_app_id(result.get("package_name"))
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
for perm in result["permissions"]:
|
||||
if perm["name"] == "REQUEST_INSTALL_PACKAGES" and perm["access"] == "allow":
|
||||
self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission",
|
||||
result["package_name"])
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys appops")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_appops(output)
|
||||
|
||||
self.log.info("Extracted a total of %d records from app-ops manager",
|
||||
len(self.results))
|
||||
51
mvt/android/modules/adb/dumpsys_battery_daily.py
Normal file
51
mvt/android/modules/adb/dumpsys_battery_daily.py
Normal file
@@ -0,0 +1,51 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_daily
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysBatteryDaily(AndroidExtraction):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record: dict) -> None:
|
||||
return {
|
||||
"timestamp": record["from"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "battery_daily",
|
||||
"data": f"Recorded update of package {record['package_name']} with vers {record['vers']}"
|
||||
}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys batterystats --daily")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_battery_daily(output)
|
||||
|
||||
self.log.info("Extracted %d records from battery daily stats", len(self.results))
|
||||
43
mvt/android/modules/adb/dumpsys_battery_history.py
Normal file
43
mvt/android/modules/adb/dumpsys_battery_history.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_history
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysBatteryHistory(AndroidExtraction):
|
||||
"""This module extracts records from battery history events."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys batterystats --history")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_battery_history(output)
|
||||
|
||||
self.log.info("Extracted %d records from battery history", len(self.results))
|
||||
@@ -1,45 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class DumpsysBatterystats(AndroidExtraction):
|
||||
"""This module extracts stats on battery consumption by processes."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
stats = self._adb_command("dumpsys batterystats")
|
||||
if self.output_folder:
|
||||
stats_path = os.path.join(self.output_folder,
|
||||
"dumpsys_batterystats.txt")
|
||||
with open(stats_path, "w") as handle:
|
||||
handle.write(stats)
|
||||
|
||||
log.info("Records from dumpsys batterystats stored at %s",
|
||||
stats_path)
|
||||
|
||||
history = self._adb_command("dumpsys batterystats --history")
|
||||
if self.output_folder:
|
||||
history_path = os.path.join(self.output_folder,
|
||||
"dumpsys_batterystats_history.txt")
|
||||
with open(history_path, "w") as handle:
|
||||
handle.write(history)
|
||||
|
||||
log.info("History records from dumpsys batterystats stored at %s",
|
||||
history_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
48
mvt/android/modules/adb/dumpsys_dbinfo.py
Normal file
48
mvt/android/modules/adb/dumpsys_dbinfo.py
Normal file
@@ -0,0 +1,48 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_dbinfo
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysDBInfo(AndroidExtraction):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
slug = "dumpsys_dbinfo"
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
path = result.get("path", "")
|
||||
for part in path.split("/"):
|
||||
ioc = self.indicators.check_app_id(part)
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys dbinfo")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_dbinfo(output)
|
||||
|
||||
self.log.info("Extracted a total of %d records from database information",
|
||||
len(self.results))
|
||||
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -10,26 +10,26 @@ from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysFull(AndroidExtraction):
|
||||
"""This module extracts stats on battery consumption by processes."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
|
||||
stats = self._adb_command("dumpsys")
|
||||
if self.output_folder:
|
||||
stats_path = os.path.join(self.output_folder,
|
||||
"dumpsys.txt")
|
||||
with open(stats_path, "w") as handle:
|
||||
handle.write(stats)
|
||||
output = self._adb_command("dumpsys")
|
||||
if self.results_path:
|
||||
output_path = os.path.join(self.results_path, "dumpsys.txt")
|
||||
with open(output_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Full dumpsys output stored at %s",
|
||||
stats_path)
|
||||
log.info("Full dumpsys output stored at %s", output_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
|
||||
@@ -1,37 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysPackages(AndroidExtraction):
|
||||
"""This module extracts details on installed packages."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("dumpsys package")
|
||||
|
||||
if self.output_folder:
|
||||
packages_path = os.path.join(self.output_folder,
|
||||
"dumpsys_packages.txt")
|
||||
with open(packages_path, "w") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Records from dumpsys package stored at %s",
|
||||
packages_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,35 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
class DumpsysProcstats(AndroidExtraction):
|
||||
"""This module extracts stats on memory consumption by processes."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("dumpsys procstats")
|
||||
if self.output_folder:
|
||||
procstats_path = os.path.join(self.output_folder,
|
||||
"dumpsys_procstats.txt")
|
||||
with open(procstats_path, "w") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Records from dumpsys procstats stored at %s",
|
||||
procstats_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,87 +1,67 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_receiver_resolver_table
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
ACTION_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
|
||||
ACTION_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
|
||||
ACTION_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
|
||||
ACTION_PHONE_STATE = "android.intent.action.PHONE_STATE"
|
||||
INTENT_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
|
||||
INTENT_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
|
||||
INTENT_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
|
||||
INTENT_PHONE_STATE = "android.intent.action.PHONE_STATE"
|
||||
INTENT_NEW_OUTGOING_CALL = "android.intent.action.NEW_OUTGOING_CALL"
|
||||
|
||||
|
||||
class DumpsysReceivers(AndroidExtraction):
|
||||
"""This module extracts details on receivers for risky activities."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, receivers in self.results.items():
|
||||
for receiver in receivers:
|
||||
if intent == INTENT_NEW_OUTGOING_SMS:
|
||||
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_DATA_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_PHONE_STATE:
|
||||
self.log.info("Found a receiver monitoring telephony state/incoming calls: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_NEW_OUTGOING_CALL:
|
||||
self.log.info("Found a receiver monitoring outgoing calls: \"%s\"",
|
||||
receiver["receiver"])
|
||||
|
||||
ioc = self.indicators.check_app_id(receiver["package_name"])
|
||||
if ioc:
|
||||
receiver["matched_indicator"] = ioc
|
||||
self.detected.append({intent: receiver})
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("dumpsys package")
|
||||
if not output:
|
||||
return
|
||||
|
||||
activity = None
|
||||
for line in output.split("\n"):
|
||||
# Find activity block markers.
|
||||
if line.strip().startswith(ACTION_NEW_OUTGOING_SMS):
|
||||
activity = ACTION_NEW_OUTGOING_SMS
|
||||
continue
|
||||
elif line.strip().startswith(ACTION_SMS_RECEIVED):
|
||||
activity = ACTION_SMS_RECEIVED
|
||||
continue
|
||||
elif line.strip().startswith(ACTION_PHONE_STATE):
|
||||
activity = ACTION_PHONE_STATE
|
||||
continue
|
||||
elif line.strip().startswith(ACTION_DATA_SMS_RECEIVED):
|
||||
activity = ACTION_DATA_SMS_RECEIVED
|
||||
continue
|
||||
|
||||
# If we are not in an activity block yet, skip.
|
||||
if not activity:
|
||||
continue
|
||||
|
||||
# If we are in a block but the line does not start with 8 spaces
|
||||
# it means the block ended a new one started, so we reset and
|
||||
# continue.
|
||||
if not line.startswith(" " * 8):
|
||||
activity = None
|
||||
continue
|
||||
|
||||
# If we got this far, we are processing receivers for the
|
||||
# activities we are interested in.
|
||||
receiver = line.strip().split(" ")[1]
|
||||
package_name = receiver.split("/")[0]
|
||||
if package_name == "com.google.android.gms":
|
||||
continue
|
||||
|
||||
if activity == ACTION_NEW_OUTGOING_SMS:
|
||||
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
|
||||
receiver)
|
||||
elif activity == ACTION_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
|
||||
receiver)
|
||||
elif activity == ACTION_DATA_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
|
||||
receiver)
|
||||
elif activity == ACTION_PHONE_STATE:
|
||||
self.log.info("Found a receiver monitoring telephony state: \"%s\"",
|
||||
receiver)
|
||||
|
||||
self.results.append({
|
||||
"activity": activity,
|
||||
"package_name": package_name,
|
||||
"receiver": receiver,
|
||||
})
|
||||
self.results = parse_dumpsys_receiver_resolver_table(output)
|
||||
|
||||
self._adb_disconnect()
|
||||
|
||||
@@ -1,33 +1,127 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
ANDROID_TMP_FOLDERS = [
|
||||
"/tmp/",
|
||||
"/data/local/tmp/",
|
||||
]
|
||||
ANDROID_MEDIA_FOLDERS = [
|
||||
"/data/media/0",
|
||||
"/sdcard/",
|
||||
]
|
||||
|
||||
|
||||
class Files(AndroidExtraction):
|
||||
"""This module extracts the list of installed packages."""
|
||||
"""This module extracts the list of files on the device."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
self.full_find = False
|
||||
|
||||
def run(self):
|
||||
def serialize(self, record: dict) -> None:
|
||||
if "modified_time" in record:
|
||||
return {
|
||||
"timestamp": record["modified_time"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "file_modified",
|
||||
"data": record["path"],
|
||||
}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
for result in self.results:
|
||||
if result.get("is_suid"):
|
||||
self.log.warning("Found an SUID file in a non-standard directory \"%s\".",
|
||||
result["path"])
|
||||
|
||||
if self.indicators and self.indicators.check_file_path(result["path"]):
|
||||
self.log.warning("Found a known suspicous file at path: \"%s\"", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
def backup_file(self, file_path: str) -> None:
|
||||
local_file_name = file_path.replace("/", "_").replace(" ", "-")
|
||||
local_files_folder = os.path.join(self.results_path, "files")
|
||||
if not os.path.exists(local_files_folder):
|
||||
os.mkdir(local_files_folder)
|
||||
|
||||
local_file_path = os.path.join(local_files_folder, local_file_name)
|
||||
|
||||
try:
|
||||
self._adb_download(remote_path=file_path,
|
||||
local_path=local_file_path)
|
||||
except Exception:
|
||||
pass
|
||||
else:
|
||||
self.log.info("Downloaded file %s to local copy at %s",
|
||||
file_path, local_file_path)
|
||||
|
||||
def find_files(self, folder: str) -> None:
|
||||
if self.full_find:
|
||||
output = self._adb_command(f"find '{folder}' -type f -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
|
||||
|
||||
for file_line in output.splitlines():
|
||||
[unix_timestamp, mode, size, owner, group, full_path] = file_line.rstrip().split(" ", 5)
|
||||
mod_time = convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(float(unix_timestamp))))
|
||||
|
||||
self.results.append({
|
||||
"path": full_path,
|
||||
"modified_time": mod_time,
|
||||
"mode": mode,
|
||||
"is_suid": (int(mode, 8) & stat.S_ISUID) == 2048,
|
||||
"is_sgid": (int(mode, 8) & stat.S_ISGID) == 1024,
|
||||
"size": size,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
})
|
||||
else:
|
||||
output = self._adb_command(f"find '{folder}' -type f 2> /dev/null")
|
||||
for file_line in output.splitlines():
|
||||
self.results.append({"path": file_line.rstrip()})
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("find / -type f 2> /dev/null")
|
||||
if output and self.output_folder:
|
||||
files_txt_path = os.path.join(self.output_folder, "files.txt")
|
||||
with open(files_txt_path, "w") as handle:
|
||||
handle.write(output)
|
||||
output = self._adb_command("find '/' -maxdepth 1 -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
|
||||
if output or output.strip().splitlines():
|
||||
self.full_find = True
|
||||
|
||||
log.info("List of visible files stored at %s", files_txt_path)
|
||||
for tmp_folder in ANDROID_TMP_FOLDERS:
|
||||
self.find_files(tmp_folder)
|
||||
|
||||
for entry in self.results:
|
||||
self.log.info("Found file in tmp folder at path %s",
|
||||
entry.get("path"))
|
||||
if self.results_path:
|
||||
self.backup_file(entry.get("path"))
|
||||
|
||||
for media_folder in ANDROID_MEDIA_FOLDERS:
|
||||
self.find_files(media_folder)
|
||||
|
||||
self.log.info("Found %s files in primary Android tmp and media folders",
|
||||
len(self.results))
|
||||
|
||||
if self.fast_mode:
|
||||
self.log.info("Flag --fast was enabled: skipping full file listing")
|
||||
else:
|
||||
self.log.info("Processing full file listing. This may take a while...")
|
||||
self.find_files("/")
|
||||
|
||||
self.log.info("Found %s total files", len(self.results))
|
||||
|
||||
self._adb_disconnect()
|
||||
|
||||
43
mvt/android/modules/adb/getprop.py
Normal file
43
mvt/android/modules/adb/getprop.py
Normal file
@@ -0,0 +1,43 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from mvt.android.parsers import parse_getprop
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Getprop(AndroidExtraction):
|
||||
"""This module extracts device properties from getprop command."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = {} if not results else results
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
output = self._adb_command("getprop")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_getprop(output)
|
||||
|
||||
# Alert if phone is outdated.
|
||||
security_patch = self.results.get("ro.build.version.security_patch", "")
|
||||
if security_patch:
|
||||
patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
|
||||
if (datetime.now() - patch_date) > timedelta(days=6*30):
|
||||
self.log.warning("This phone has not received security updates for more than "
|
||||
"six months (last update: %s)", security_patch)
|
||||
|
||||
self.log.info("Extracted %d Android system properties", len(self.results))
|
||||
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -14,13 +14,14 @@ log = logging.getLogger(__name__)
|
||||
class Logcat(AndroidExtraction):
|
||||
"""This module extracts details on installed packages."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
|
||||
# Get the current logcat.
|
||||
@@ -28,18 +29,18 @@ class Logcat(AndroidExtraction):
|
||||
# Get the locat prior to last reboot.
|
||||
last_output = self._adb_command("logcat -L")
|
||||
|
||||
if self.output_folder:
|
||||
logcat_path = os.path.join(self.output_folder,
|
||||
if self.results_path:
|
||||
logcat_path = os.path.join(self.results_path,
|
||||
"logcat.txt")
|
||||
with open(logcat_path, "w") as handle:
|
||||
with open(logcat_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Current logcat logs stored at %s",
|
||||
logcat_path)
|
||||
|
||||
logcat_last_path = os.path.join(self.output_folder,
|
||||
logcat_last_path = os.path.join(self.results_path,
|
||||
"logcat_last.txt")
|
||||
with open(logcat_last_path, "w") as handle:
|
||||
with open(logcat_last_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(last_output)
|
||||
|
||||
log.info("Logcat logs prior to last reboot stored at %s",
|
||||
|
||||
@@ -1,27 +1,85 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pkg_resources
|
||||
from rich.console import Console
|
||||
from rich.progress import track
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
from mvt.common.virustotal import VTNoKey, VTQuotaExceeded, virustotal_lookup
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
DANGEROUS_PERMISSIONS_THRESHOLD = 10
|
||||
DANGEROUS_PERMISSIONS = [
|
||||
"android.permission.ACCESS_COARSE_LOCATION",
|
||||
"android.permission.ACCESS_FINE_LOCATION",
|
||||
"android.permission.AUTHENTICATE_ACCOUNTS",
|
||||
"android.permission.CAMERA",
|
||||
"android.permission.DISABLE_KEYGUARD",
|
||||
"android.permission.PROCESS_OUTGOING_CALLS",
|
||||
"android.permission.READ_CALENDAR",
|
||||
"android.permission.READ_CALL_LOG",
|
||||
"android.permission.READ_CONTACTS",
|
||||
"android.permission.READ_PHONE_STATE",
|
||||
"android.permission.READ_SMS",
|
||||
"android.permission.RECEIVE_MMS",
|
||||
"android.permission.RECEIVE_SMS",
|
||||
"android.permission.RECEIVE_WAP_PUSH",
|
||||
"android.permission.RECORD_AUDIO",
|
||||
"android.permission.SEND_SMS",
|
||||
"android.permission.SYSTEM_ALERT_WINDOW",
|
||||
"android.permission.USE_CREDENTIALS",
|
||||
"android.permission.USE_SIP",
|
||||
"com.android.browser.permission.READ_HISTORY_BOOKMARKS",
|
||||
]
|
||||
|
||||
ROOT_PACKAGES = [
|
||||
"com.noshufou.android.su",
|
||||
"com.noshufou.android.su.elite",
|
||||
"eu.chainfire.supersu",
|
||||
"com.koushikdutta.superuser",
|
||||
"com.thirdparty.superuser",
|
||||
"com.yellowes.su",
|
||||
"com.koushikdutta.rommanager",
|
||||
"com.koushikdutta.rommanager.license",
|
||||
"com.dimonvideo.luckypatcher",
|
||||
"com.chelpus.lackypatch",
|
||||
"com.ramdroid.appquarantine",
|
||||
"com.ramdroid.appquarantinepro",
|
||||
"com.devadvance.rootcloak",
|
||||
"com.devadvance.rootcloakplus",
|
||||
"de.robv.android.xposed.installer",
|
||||
"com.saurik.substrate",
|
||||
"com.zachspong.temprootremovejb",
|
||||
"com.amphoras.hidemyroot",
|
||||
"com.amphoras.hidemyrootadfree",
|
||||
"com.formyhm.hiderootPremium",
|
||||
"com.formyhm.hideroot",
|
||||
"me.phh.superuser",
|
||||
"eu.chainfire.supersu.pro",
|
||||
"com.kingouser.com",
|
||||
"com.topjohnwu.magisk",
|
||||
]
|
||||
|
||||
|
||||
class Packages(AndroidExtraction):
|
||||
"""This module extracts the list of installed packages."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
def serialize(self, record: dict) -> None:
|
||||
records = []
|
||||
|
||||
timestamps = [
|
||||
@@ -40,40 +98,130 @@ class Packages(AndroidExtraction):
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
root_packages_path = os.path.join("..", "..", "data", "root_packages.txt")
|
||||
root_packages_string = pkg_resources.resource_string(__name__, root_packages_path)
|
||||
root_packages = root_packages_string.decode("utf-8").split("\n")
|
||||
root_packages = [rp.strip() for rp in root_packages]
|
||||
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
for result in self.results:
|
||||
if result["package_name"] in root_packages:
|
||||
if result["package_name"] in ROOT_PACKAGES:
|
||||
self.log.warning("Found an installed package related to rooting/jailbreaking: \"%s\"",
|
||||
result["package_name"])
|
||||
self.detected.append(result)
|
||||
if result["package_name"] in self.indicators.ioc_app_ids:
|
||||
self.log.warning("Found a malicious package name: \"%s\"",
|
||||
result["package_name"])
|
||||
self.detected.append(result)
|
||||
for file in result["files"]:
|
||||
if file["sha256"] in self.indicators.ioc_files_sha256:
|
||||
self.log.warning("Found a malicious APK: \"%s\" %s",
|
||||
result["package_name"],
|
||||
file["sha256"])
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def _get_files_for_package(self, package_name):
|
||||
if not self.indicators:
|
||||
continue
|
||||
|
||||
ioc = self.indicators.check_app_id(result.get("package_name"))
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
for package_file in result.get("files", []):
|
||||
ioc = self.indicators.check_file_hash(package_file["sha256"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
|
||||
@staticmethod
|
||||
def check_virustotal(packages: list) -> None:
|
||||
hashes = []
|
||||
for package in packages:
|
||||
for file in package.get("files", []):
|
||||
if file["sha256"] not in hashes:
|
||||
hashes.append(file["sha256"])
|
||||
|
||||
total_hashes = len(hashes)
|
||||
detections = {}
|
||||
|
||||
for i in track(range(total_hashes), description=f"Looking up {total_hashes} files..."):
|
||||
try:
|
||||
results = virustotal_lookup(hashes[i])
|
||||
except VTNoKey as e:
|
||||
log.info(e)
|
||||
return
|
||||
except VTQuotaExceeded as e:
|
||||
log.error("Unable to continue: %s", e)
|
||||
break
|
||||
|
||||
if not results:
|
||||
continue
|
||||
|
||||
positives = results["attributes"]["last_analysis_stats"]["malicious"]
|
||||
total = len(results["attributes"]["last_analysis_results"])
|
||||
|
||||
detections[hashes[i]] = f"{positives}/{total}"
|
||||
|
||||
table = Table(title="VirusTotal Packages Detections")
|
||||
table.add_column("Package name")
|
||||
table.add_column("File path")
|
||||
table.add_column("Detections")
|
||||
|
||||
for package in packages:
|
||||
for file in package.get("files", []):
|
||||
row = [package["package_name"], file["path"]]
|
||||
|
||||
if file["sha256"] in detections:
|
||||
detection = detections[file["sha256"]]
|
||||
positives = detection.split("/")[0]
|
||||
if int(positives) > 0:
|
||||
row.append(Text(detection, "red bold"))
|
||||
else:
|
||||
row.append(detection)
|
||||
else:
|
||||
row.append("not found")
|
||||
|
||||
table.add_row(*row)
|
||||
|
||||
console = Console()
|
||||
console.print(table)
|
||||
|
||||
@staticmethod
|
||||
def parse_package_for_details(output: str) -> dict:
|
||||
details = {
|
||||
"uid": "",
|
||||
"version_name": "",
|
||||
"version_code": "",
|
||||
"timestamp": "",
|
||||
"first_install_time": "",
|
||||
"last_update_time": "",
|
||||
"requested_permissions": [],
|
||||
}
|
||||
|
||||
in_permissions = False
|
||||
for line in output.splitlines():
|
||||
if in_permissions:
|
||||
if line.startswith(" " * 4) and not line.startswith(" " * 6):
|
||||
in_permissions = False
|
||||
continue
|
||||
|
||||
permission = line.strip().split(":")[0]
|
||||
details["requested_permissions"].append(permission)
|
||||
|
||||
if line.strip().startswith("userId="):
|
||||
details["uid"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("versionName="):
|
||||
details["version_name"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("versionCode="):
|
||||
details["version_code"] = line.split("=", 1)[1].strip()
|
||||
elif line.strip().startswith("timeStamp="):
|
||||
details["timestamp"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("firstInstallTime="):
|
||||
details["first_install_time"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("lastUpdateTime="):
|
||||
details["last_update_time"] = line.split("=")[1].strip()
|
||||
elif line.strip() == "requested permissions:":
|
||||
in_permissions = True
|
||||
continue
|
||||
|
||||
return details
|
||||
|
||||
def _get_files_for_package(self, package_name: str) -> list:
|
||||
output = self._adb_command(f"pm path {package_name}")
|
||||
output = output.strip().replace("package:", "")
|
||||
if not output:
|
||||
return []
|
||||
|
||||
package_files = []
|
||||
for file_path in output.split("\n"):
|
||||
for file_path in output.splitlines():
|
||||
file_path = file_path.strip()
|
||||
|
||||
md5 = self._adb_command(f"md5sum {file_path}").split(" ")[0]
|
||||
@@ -91,11 +239,12 @@ class Packages(AndroidExtraction):
|
||||
|
||||
return package_files
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
|
||||
packages = self._adb_command("pm list packages -U -u -i -f")
|
||||
for line in packages.split("\n"):
|
||||
packages = self._adb_command("pm list packages -u -i -f")
|
||||
|
||||
for line in packages.splitlines():
|
||||
line = line.strip()
|
||||
if not line.startswith("package:"):
|
||||
continue
|
||||
@@ -111,31 +260,22 @@ class Packages(AndroidExtraction):
|
||||
if installer == "null":
|
||||
installer = None
|
||||
|
||||
try:
|
||||
uid = fields[2].split(":")[1].strip()
|
||||
except IndexError:
|
||||
uid = None
|
||||
|
||||
dumpsys = self._adb_command(f"dumpsys package {package_name} | grep -A2 timeStamp").split("\n")
|
||||
timestamp = dumpsys[0].split("=")[1].strip()
|
||||
first_install = dumpsys[1].split("=")[1].strip()
|
||||
last_update = dumpsys[2].split("=")[1].strip()
|
||||
|
||||
package_files = self._get_files_for_package(package_name)
|
||||
|
||||
self.results.append({
|
||||
new_package = {
|
||||
"package_name": package_name,
|
||||
"file_name": file_name,
|
||||
"installer": installer,
|
||||
"timestamp": timestamp,
|
||||
"first_install_time": first_install,
|
||||
"last_update_time": last_update,
|
||||
"uid": uid,
|
||||
"disabled": False,
|
||||
"system": False,
|
||||
"third_party": False,
|
||||
"files": package_files,
|
||||
})
|
||||
}
|
||||
|
||||
dumpsys_package = self._adb_command(f"dumpsys package {package_name}")
|
||||
package_details = self.parse_package_for_details(dumpsys_package)
|
||||
new_package.update(package_details)
|
||||
|
||||
self.results.append(new_package)
|
||||
|
||||
cmds = [
|
||||
{"field": "disabled", "arg": "-d"},
|
||||
@@ -144,7 +284,7 @@ class Packages(AndroidExtraction):
|
||||
]
|
||||
for cmd in cmds:
|
||||
output = self._adb_command(f"pm list packages {cmd['arg']}")
|
||||
for line in output.split("\n"):
|
||||
for line in output.splitlines():
|
||||
line = line.strip()
|
||||
if not line.startswith("package:"):
|
||||
continue
|
||||
@@ -154,13 +294,31 @@ class Packages(AndroidExtraction):
|
||||
if result["package_name"] == package_name:
|
||||
self.results[i][cmd["field"]] = True
|
||||
|
||||
for result in self.results:
|
||||
if not result["third_party"]:
|
||||
continue
|
||||
|
||||
dangerous_permissions_count = 0
|
||||
for perm in result["requested_permissions"]:
|
||||
if perm in DANGEROUS_PERMISSIONS:
|
||||
dangerous_permissions_count += 1
|
||||
|
||||
if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD:
|
||||
self.log.info("Third-party package \"%s\" requested %d potentially dangerous permissions",
|
||||
result["package_name"], dangerous_permissions_count)
|
||||
|
||||
packages_to_lookup = []
|
||||
for result in self.results:
|
||||
if result["system"]:
|
||||
continue
|
||||
|
||||
packages_to_lookup.append(result)
|
||||
self.log.info("Found non-system package with name \"%s\" installed by \"%s\" on %s",
|
||||
result["package_name"], result["installer"], result["timestamp"])
|
||||
|
||||
if not self.fast_mode:
|
||||
self.check_virustotal(packages_to_lookup)
|
||||
|
||||
self.log.info("Extracted at total of %d installed package names",
|
||||
len(self.results))
|
||||
|
||||
|
||||
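For reference, a minimal standalone sketch of the key=value walk that the new parse_package_for_details() performs over `dumpsys package` output. The sample excerpt and the reduced set of keys are illustrative only; real dumpsys output varies by Android version.

```python
# Hypothetical sample; not captured from a real device.
SAMPLE = """  Package [com.example.app] (12345):
    userId=10123
    versionCode=42 minSdk=26 targetSdk=33
    versionName=1.2.3
    requested permissions:
      android.permission.READ_SMS
      android.permission.RECORD_AUDIO
"""

def parse_details(output: str) -> dict:
    details = {"uid": "", "version_name": "", "requested_permissions": []}
    in_permissions = False
    for line in output.splitlines():
        if in_permissions:
            # Permission entries are indented deeper than the section header.
            if line.startswith(" " * 4) and not line.startswith(" " * 6):
                in_permissions = False
            else:
                details["requested_permissions"].append(line.strip().split(":")[0])
                continue
        stripped = line.strip()
        if stripped.startswith("userId="):
            details["uid"] = stripped.split("=", 1)[1]
        elif stripped.startswith("versionName="):
            details["version_name"] = stripped.split("=", 1)[1]
        elif stripped == "requested permissions:":
            in_permissions = True
    return details

print(parse_details(SAMPLE))
# {'uid': '10123', 'version_name': '1.2.3',
#  'requested_permissions': ['android.permission.READ_SMS', 'android.permission.RECORD_AUDIO']}
```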
@@ -1,5 +1,5 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

@@ -9,21 +9,33 @@ from .base import AndroidExtraction

log = logging.getLogger(__name__)


class Processes(AndroidExtraction):
"""This module extracts details on running processes."""

def __init__(self, file_path=None, base_folder=None, output_folder=None,
serial=None, fast_mode=False, log=None, results=[]):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

def run(self):
def check_indicators(self) -> None:
if not self.indicators:
return

for result in self.results:
ioc = self.indicators.check_app_id(result.get("name", ""))
if ioc:
result["matched_indicator"] = ioc
self.detected.append(result)

def run(self) -> None:
self._adb_connect()

output = self._adb_command("ps -e")

for line in output.split("\n")[1:]:
for line in output.splitlines()[1:]:
line = line.strip()
if line == "":
continue
@@ -1,30 +1,38 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
import os

import pkg_resources

from .base import AndroidExtraction

log = logging.getLogger(__name__)


class RootBinaries(AndroidExtraction):
"""This module extracts the list of installed packages."""

def __init__(self, file_path=None, base_folder=None, output_folder=None,
serial=None, fast_mode=False, log=None, results=[]):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

def run(self):
root_binaries_path = os.path.join("..", "..", "data", "root_binaries.txt")
root_binaries_string = pkg_resources.resource_string(__name__, root_binaries_path)
root_binaries = root_binaries_string.decode("utf-8").split("\n")
def run(self) -> None:
root_binaries = [
"su",
"busybox",
"supersu",
"Superuser.apk",
"KingoUser.apk",
"SuperSu.apk",
"magisk",
"magiskhide",
"magiskinit",
"magiskpolicy",
]

self._adb_connect()
38 mvt/android/modules/adb/selinux_status.py Normal file
@@ -0,0 +1,38 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

from .base import AndroidExtraction

log = logging.getLogger(__name__)


class SELinuxStatus(AndroidExtraction):
"""This module checks if SELinux is being enforced."""

slug = "selinux_status"

def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

self.results = {} if not results else results

def run(self) -> None:
self._adb_connect()
output = self._adb_command("getenforce")
self._adb_disconnect()

status = output.lower().strip()
self.results["status"] = status

if status == "enforcing":
self.log.info("SELinux is being regularly enforced")
else:
self.log.warning("SELinux status is \"%s\"!", status)
106 mvt/android/modules/adb/settings.py Normal file
@@ -0,0 +1,106 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
ANDROID_DANGEROUS_SETTINGS = [
|
||||
{
|
||||
"description": "disabled Google Play Services apps verification",
|
||||
"key": "verifier_verify_adb_installs",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled Google Play Protect",
|
||||
"key": "package_verifier_enable",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled Google Play Protect",
|
||||
"key": "package_verifier_user_consent",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled Google Play Protect",
|
||||
"key": "upload_apk_enable",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled confirmation of adb apps installation",
|
||||
"key": "adb_install_need_confirm",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled sharing of security reports",
|
||||
"key": "send_security_reports",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled sharing of crash logs with manufacturer",
|
||||
"key": "samsung_errorlog_agree",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "disabled applications errors reports",
|
||||
"key": "send_action_app_error",
|
||||
"safe_value": "1",
|
||||
},
|
||||
{
|
||||
"description": "enabled installation of non Google Play apps",
|
||||
"key": "install_non_market_apps",
|
||||
"safe_value": "0",
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class Settings(AndroidExtraction):
|
||||
"""This module extracts Android system settings."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = {} if not results else results
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
for namespace, settings in self.results.items():
|
||||
for key, value in settings.items():
|
||||
for danger in ANDROID_DANGEROUS_SETTINGS:
|
||||
# Check if one of the dangerous settings is using an unsafe
|
||||
# value (different than the one specified).
|
||||
if danger["key"] == key and danger["safe_value"] != value:
|
||||
self.log.warning("Found suspicious setting \"%s = %s\" (%s)",
|
||||
key, value, danger["description"])
|
||||
break
|
||||
|
||||
def run(self) -> None:
|
||||
self._adb_connect()
|
||||
|
||||
for namespace in ["system", "secure", "global"]:
|
||||
out = self._adb_command(f"cmd settings list {namespace}")
|
||||
if not out:
|
||||
continue
|
||||
|
||||
self.results[namespace] = {}
|
||||
|
||||
for line in out.splitlines():
|
||||
line = line.strip()
|
||||
if line == "":
|
||||
continue
|
||||
|
||||
fields = line.split("=", 1)
|
||||
try:
|
||||
self.results[namespace][fields[0]] = fields[1]
|
||||
except IndexError:
|
||||
continue
|
||||
|
||||
self._adb_disconnect()
|
||||
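For reference, a minimal sketch of the comparison Settings.check_indicators() does above, run on a hand-written settings snapshot rather than real `adb shell cmd settings list` output; the two entries and their values are illustrative.

```python
DANGEROUS = [
    {"key": "verifier_verify_adb_installs", "safe_value": "1",
     "description": "disabled Google Play Services apps verification"},
    {"key": "install_non_market_apps", "safe_value": "0",
     "description": "enabled installation of non Google Play apps"},
]

# Fabricated snapshot keyed by settings namespace.
snapshot = {
    "secure": {"install_non_market_apps": "1"},
    "global": {"verifier_verify_adb_installs": "1"},
}

for namespace, settings in snapshot.items():
    for key, value in settings.items():
        for danger in DANGEROUS:
            if danger["key"] == key and danger["safe_value"] != value:
                print(f"suspicious setting {key} = {value} ({danger['description']})")
                break
# Only install_non_market_apps is reported, since its value differs from the safe one.
```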
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -7,6 +7,9 @@ import logging
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
from mvt.android.parsers.backup import (AndroidBackupParsingError,
|
||||
parse_tar_for_sms)
|
||||
from mvt.common.module import InsufficientPrivileges
|
||||
from mvt.common.utils import check_for_links, convert_timestamp_to_iso
|
||||
|
||||
from .base import AndroidExtraction
|
||||
@@ -15,12 +18,12 @@ log = logging.getLogger(__name__)
|
||||
|
||||
SMS_BUGLE_PATH = "data/data/com.google.android.apps.messaging/databases/bugle_db"
|
||||
SMS_BUGLE_QUERY = """
|
||||
SELECT
|
||||
ppl.normalized_destination AS number,
|
||||
SELECT
|
||||
ppl.normalized_destination AS address,
|
||||
p.timestamp AS timestamp,
|
||||
CASE WHEN m.sender_id IN
|
||||
CASE WHEN m.sender_id IN
|
||||
(SELECT _id FROM participants WHERE contact_id=-1)
|
||||
THEN 2 ELSE 1 END incoming, p.text AS text
|
||||
THEN 2 ELSE 1 END incoming, p.text AS body
|
||||
FROM messages m, conversations c, parts p,
|
||||
participants ppl, conversation_participants cp
|
||||
WHERE (m.conversation_id = c._id)
|
||||
@@ -31,45 +34,48 @@ WHERE (m.conversation_id = c._id)
|
||||
|
||||
SMS_MMSSMS_PATH = "data/data/com.android.providers.telephony/databases/mmssms.db"
|
||||
SMS_MMSMS_QUERY = """
|
||||
SELECT
|
||||
address AS number,
|
||||
SELECT
|
||||
address AS address,
|
||||
date_sent AS timestamp,
|
||||
type as incoming,
|
||||
body AS text
|
||||
body AS body
|
||||
FROM sms;
|
||||
"""
|
||||
|
||||
|
||||
class SMS(AndroidExtraction):
|
||||
"""This module extracts all SMS messages containing links."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
text = record["text"].replace("\n", "\\n")
|
||||
def serialize(self, record: dict) -> None:
|
||||
body = record["body"].replace("\n", "\\n")
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": f"sms_{record['direction']}",
|
||||
"data": f"{record['number']}: \"{text}\""
|
||||
"data": f"{record.get('address', 'unknown source')}: \"{body}\""
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if not "text" in message:
|
||||
if "body" not in message:
|
||||
continue
|
||||
|
||||
message_links = check_for_links(message["text"])
|
||||
# TODO: check links exported from the body previously.
|
||||
message_links = check_for_links(message["body"])
|
||||
if self.indicators.check_domains(message_links):
|
||||
self.detected.append(message)
|
||||
|
||||
def _parse_db(self, db_path):
|
||||
def _parse_db(self, db_path: str) -> None:
|
||||
"""Parse an Android bugle_db SMS database file.
|
||||
|
||||
:param db_path: Path to the Android SMS database file to process
|
||||
@@ -77,10 +83,10 @@ class SMS(AndroidExtraction):
|
||||
"""
|
||||
conn = sqlite3.connect(db_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
if (self.SMS_DB_TYPE == 1):
|
||||
|
||||
if self.SMS_DB_TYPE == 1:
|
||||
cur.execute(SMS_BUGLE_QUERY)
|
||||
elif (self.SMS_DB_TYPE == 2):
|
||||
elif self.SMS_DB_TYPE == 2:
|
||||
cur.execute(SMS_MMSMS_QUERY)
|
||||
|
||||
names = [description[0] for description in cur.description]
|
||||
@@ -95,7 +101,7 @@ class SMS(AndroidExtraction):
|
||||
|
||||
# If we find links in the messages or if they are empty we add
|
||||
# them to the list of results.
|
||||
if check_for_links(message["text"]) or message["text"].strip() == "":
|
||||
if check_for_links(message["body"]) or message["body"].strip() == "":
|
||||
self.results.append(message)
|
||||
|
||||
cur.close()
|
||||
@@ -103,12 +109,39 @@ class SMS(AndroidExtraction):
|
||||
|
||||
log.info("Extracted a total of %d SMS messages containing links", len(self.results))
|
||||
|
||||
def run(self):
|
||||
if (self._adb_check_file_exists(os.path.join("/", SMS_BUGLE_PATH))):
|
||||
self.SMS_DB_TYPE = 1
|
||||
self._adb_process_file(os.path.join("/", SMS_BUGLE_PATH), self._parse_db)
|
||||
elif (self._adb_check_file_exists(os.path.join("/", SMS_MMSSMS_PATH))):
|
||||
self.SMS_DB_TYPE = 2
|
||||
self._adb_process_file(os.path.join("/", SMS_MMSSMS_PATH), self._parse_db)
|
||||
else:
|
||||
self.log.error("No SMS database found")
|
||||
def _extract_sms_adb(self) -> None:
|
||||
"""Use the Android backup command to extract SMS data from the native SMS
|
||||
app.
|
||||
|
||||
It is crucial to use the under-documented "-nocompress" flag to disable
|
||||
the non-standard Java compression algorithm. This module only supports
|
||||
an unencrypted ADB backup.
|
||||
"""
|
||||
backup_tar = self._generate_backup("com.android.providers.telephony")
|
||||
if not backup_tar:
|
||||
return
|
||||
|
||||
try:
|
||||
self.results = parse_tar_for_sms(backup_tar)
|
||||
except AndroidBackupParsingError:
|
||||
self.log.info("Impossible to read SMS from the Android Backup, please extract "\
|
||||
"the SMS and try extracting it with Android Backup Extractor")
|
||||
return
|
||||
|
||||
log.info("Extracted a total of %d SMS messages containing links", len(self.results))
|
||||
|
||||
def run(self) -> None:
|
||||
try:
|
||||
if (self._adb_check_file_exists(os.path.join("/", SMS_BUGLE_PATH))):
|
||||
self.SMS_DB_TYPE = 1
|
||||
self._adb_process_file(os.path.join("/", SMS_BUGLE_PATH), self._parse_db)
|
||||
elif (self._adb_check_file_exists(os.path.join("/", SMS_MMSSMS_PATH))):
|
||||
self.SMS_DB_TYPE = 2
|
||||
self._adb_process_file(os.path.join("/", SMS_MMSSMS_PATH), self._parse_db)
|
||||
return
|
||||
except InsufficientPrivileges:
|
||||
pass
|
||||
|
||||
self.log.warn("No SMS database found. Trying extraction of SMS data using " \
|
||||
"Android backup feature.")
|
||||
self._extract_sms_adb()
|
||||
|
||||
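The change to SMS.run() above introduces a fallback: read the SMS database directly when possible, and only fall back to the Android backup extraction when the adb shell user lacks the privileges to read it. A hypothetical sketch of that control flow, with placeholder callables standing in for the MVT methods:

```python
class InsufficientPrivileges(Exception):
    """Placeholder for mvt.common.module.InsufficientPrivileges."""


def collect_sms(read_db, extract_from_backup):
    """read_db and extract_from_backup are callables supplied by the caller."""
    try:
        return read_db()
    except InsufficientPrivileges:
        pass
    print("No SMS database found. Trying extraction through the backup feature.")
    return extract_from_backup()


def read_db():
    # Simulates an unrooted device where /data is not readable over adb.
    raise InsufficientPrivileges()


def extract_from_backup():
    return [{"address": "12345", "body": "http://example.com"}]


print(collect_sms(read_db, extract_from_backup))
```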
@@ -1,5 +1,5 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

@@ -16,16 +16,18 @@ log = logging.getLogger(__name__)

WHATSAPP_PATH = "data/data/com.whatsapp/databases/msgstore.db"


class Whatsapp(AndroidExtraction):
"""This module extracts all WhatsApp messages containing links."""

def __init__(self, file_path=None, base_folder=None, output_folder=None,
serial=None, fast_mode=False, log=None, results=[]):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

def serialize(self, record):
def serialize(self, record: dict) -> None:
text = record["data"].replace("\n", "\\n")
return {
"timestamp": record["isodate"],
@@ -34,19 +36,19 @@ class Whatsapp(AndroidExtraction):
"data": f"\"{text}\""
}

def check_indicators(self):
def check_indicators(self) -> None:
if not self.indicators:
return

for message in self.results:
if not "data" in message:
if "data" not in message:
continue

message_links = check_for_links(message["data"])
if self.indicators.check_domains(message_links):
self.detected.append(message)

def _parse_db(self, db_path):
def _parse_db(self, db_path: str) -> None:
"""Parse an Android msgstore.db WhatsApp database file.

:param db_path: Path to the Android WhatsApp database file to process
@@ -83,5 +85,8 @@ class Whatsapp(AndroidExtraction):
log.info("Extracted a total of %d WhatsApp messages containing links", len(messages))
self.results = messages

def run(self):
self._adb_process_file(os.path.join("/", WHATSAPP_PATH), self._parse_db)
def run(self) -> None:
try:
self._adb_process_file(os.path.join("/", WHATSAPP_PATH), self._parse_db)
except Exception as e:
self.log.error(e)
@@ -1,8 +1,8 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from .sms import SMS

BACKUP_MODULES = [SMS,]
BACKUP_MODULES = [SMS]
48 mvt/android/modules/backup/base.py Normal file
@@ -0,0 +1,48 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import fnmatch
import os
from tarfile import TarFile

from mvt.common.module import MVTModule


class BackupExtraction(MVTModule):
"""This class provides a base for all backup extractios modules"""
ab = None

def from_folder(self, backup_path: str, files: list) -> None:
"""
Get all the files and list them
"""
self.backup_path = backup_path
self.files = files

def from_ab(self, file_path: str, tar: TarFile, files: list) -> None:
"""
Extract the files
"""
self.ab = file_path
self.tar = tar
self.files = files

def _get_files_by_pattern(self, pattern: str) -> list:
return fnmatch.filter(self.files, pattern)

def _get_file_content(self, file_path: str) -> bytes:
if self.ab:
try:
member = self.tar.getmember(file_path)
except KeyError:
return None
handle = self.tar.extractfile(member)
else:
handle = open(os.path.join(self.backup_path, file_path), "rb")

data = handle.read()
handle.close()

return data
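The pattern matching in _get_files_by_pattern() above is plain fnmatch over the list of file names found in the backup. A quick illustration with made-up entries, using the same `*_sms_backup` pattern the backup SMS module passes in later:

```python
import fnmatch

files = [
    "apps/com.android.providers.telephony/d_f/000000_sms_backup",
    "apps/com.android.providers.telephony/d_f/000000_mms_backup",
    "apps/com.whatsapp/db/msgstore.db",
]

print(fnmatch.filter(files, "apps/com.android.providers.telephony/d_f/*_sms_backup"))
# ['apps/com.android.providers.telephony/d_f/000000_sms_backup']
```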
@@ -1,64 +1,44 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import json
|
||||
import os
|
||||
import zlib
|
||||
import logging
|
||||
|
||||
from mvt.common.module import MVTModule
|
||||
from mvt.common.utils import check_for_links
|
||||
from mvt.android.modules.backup.base import BackupExtraction
|
||||
from mvt.android.parsers.backup import parse_sms_file
|
||||
|
||||
|
||||
class SMS(MVTModule):
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
class SMS(BackupExtraction):
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
self.results = []
|
||||
|
||||
def check_indicators(self):
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if not "body" in message:
|
||||
if "body" not in message:
|
||||
continue
|
||||
|
||||
message_links = check_for_links(message["body"])
|
||||
if self.indicators.check_domains(message_links):
|
||||
if self.indicators.check_domains(message["links"]):
|
||||
self.detected.append(message)
|
||||
|
||||
def _process_sms_file(self, file_path):
|
||||
self.log.info("Processing SMS backup file at %s", file_path)
|
||||
def run(self) -> None:
|
||||
for file in self._get_files_by_pattern("apps/com.android.providers.telephony/d_f/*_sms_backup"):
|
||||
self.log.info("Processing SMS backup file at %s", file)
|
||||
data = self._get_file_content(file)
|
||||
self.results.extend(parse_sms_file(data))
|
||||
|
||||
with open(file_path, "rb") as handle:
|
||||
data = zlib.decompress(handle.read())
|
||||
json_data = json.loads(data)
|
||||
for file in self._get_files_by_pattern("apps/com.android.providers.telephony/d_f/*_mms_backup"):
|
||||
self.log.info("Processing MMS backup file at %s", file)
|
||||
data = self._get_file_content(file)
|
||||
self.results.extend(parse_sms_file(data))
|
||||
|
||||
for entry in json_data:
|
||||
message_links = check_for_links(entry["body"])
|
||||
|
||||
# If we find links in the messages or if they are empty we add them to the list.
|
||||
if message_links or entry["body"].strip() == "":
|
||||
self.results.append(entry)
|
||||
|
||||
def run(self):
|
||||
app_folder = os.path.join(self.base_folder,
|
||||
"apps",
|
||||
"com.android.providers.telephony",
|
||||
"d_f")
|
||||
if not os.path.exists(app_folder):
|
||||
raise FileNotFoundError("Unable to find the SMS backup folder")
|
||||
|
||||
for file_name in os.listdir(app_folder):
|
||||
if not file_name.endswith("_sms_backup"):
|
||||
continue
|
||||
|
||||
file_path = os.path.join(app_folder, file_name)
|
||||
self._process_sms_file(file_path)
|
||||
|
||||
self.log.info("Extracted a total of %d SMS messages containing links",
|
||||
self.log.info("Extracted a total of %d SMS & MMS messages containing links",
|
||||
len(self.results))
|
||||
|
||||
17 mvt/android/modules/bugreport/__init__.py Normal file
@@ -0,0 +1,17 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from .accessibility import Accessibility
from .activities import Activities
from .appops import Appops
from .battery_daily import BatteryDaily
from .battery_history import BatteryHistory
from .dbinfo import DBInfo
from .getprop import Getprop
from .packages import Packages
from .receivers import Receivers

BUGREPORT_MODULES = [Accessibility, Activities, Appops, BatteryDaily,
BatteryHistory, DBInfo, Getprop, Packages, Receivers]
61 mvt/android/modules/bugreport/accessibility.py Normal file
@@ -0,0 +1,61 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

from mvt.android.parsers import parse_dumpsys_accessibility

from .base import BugReportModule

log = logging.getLogger(__name__)


class Accessibility(BugReportModule):
"""This module extracts stats on accessibility."""

def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

def check_indicators(self) -> None:
if not self.indicators:
return

for result in self.results:
ioc = self.indicators.check_app_id(result["package_name"])
if ioc:
result["matched_indicator"] = ioc
self.detected.append(result)
continue

def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
return

lines = []
in_accessibility = False
for line in content.decode(errors="ignore").splitlines():
if line.strip() == "DUMP OF SERVICE accessibility:":
in_accessibility = True
continue

if not in_accessibility:
continue

if line.strip().startswith("------------------------------------------------------------------------------"):
break

lines.append(line)

self.results = parse_dumpsys_accessibility("\n".join(lines))
for result in self.results:
log.info("Found installed accessibility service \"%s\"", result.get("service"))

self.log.info("Identified a total of %d accessibility services", len(self.results))
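The bug report modules above all slice one "DUMP OF SERVICE <name>:" section out of the dumpstate text before handing it to a parser. A generic sketch of that loop, run on a tiny fabricated dumpstate excerpt:

```python
SEPARATOR = "-" * 78  # the dashed divider between dumpstate sections

def extract_section(dumpstate: str, service: str) -> str:
    lines = []
    in_section = False
    for line in dumpstate.splitlines():
        if line.strip() == f"DUMP OF SERVICE {service}:":
            in_section = True
            continue
        if not in_section:
            continue
        if line.strip().startswith(SEPARATOR):
            break
        lines.append(line)
    return "\n".join(lines)

sample = "\n".join([
    "DUMP OF SERVICE accessibility:",
    "ACCESSIBILITY MANAGER (dumpsys accessibility)",
    "  installed services: {com.example/.TrackerService}",
    SEPARATOR,
    "DUMP OF SERVICE activity:",
])
print(extract_section(sample, "accessibility"))
# Prints only the two lines between the header and the separator.
```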
62 mvt/android/modules/bugreport/activities.py Normal file
@@ -0,0 +1,62 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_activity_resolver_table
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Activities(BugReportModule):
|
||||
"""This module extracts details on receivers for risky activities."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, activities in self.results.items():
|
||||
for activity in activities:
|
||||
ioc = self.indicators.check_app_id(activity["package_name"])
|
||||
if ioc:
|
||||
activity["matched_indicator"] = ioc
|
||||
self.detected.append({intent: activity})
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_package = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE package:":
|
||||
in_package = True
|
||||
continue
|
||||
|
||||
if not in_package:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_activity_resolver_table("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted activities for %d intents", len(self.results))
|
||||
79 mvt/android/modules/bugreport/appops.py Normal file
@@ -0,0 +1,79 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_appops
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Appops(BugReportModule):
|
||||
"""This module extracts information on package from App-Ops Manager."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record: dict) -> None:
|
||||
records = []
|
||||
for perm in record["permissions"]:
|
||||
if "entries" not in perm:
|
||||
continue
|
||||
|
||||
for entry in perm["entries"]:
|
||||
if "timestamp" in entry:
|
||||
records.append({
|
||||
"timestamp": entry["timestamp"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": entry["access"],
|
||||
"data": f"{record['package_name']} access to {perm['name']}: {entry['access']}",
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
for result in self.results:
|
||||
if self.indicators:
|
||||
ioc = self.indicators.check_app_id(result.get("package_name"))
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
for perm in result["permissions"]:
|
||||
if perm["name"] == "REQUEST_INSTALL_PACKAGES" and perm["access"] == "allow":
|
||||
self.log.info("Package %s with REQUEST_INSTALL_PACKAGES permission", result["package_name"])
|
||||
|
||||
def run(self) -> None:
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_appops = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE appops:":
|
||||
in_appops = True
|
||||
continue
|
||||
|
||||
if not in_appops:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_appops("\n".join(lines))
|
||||
|
||||
self.log.info("Identified a total of %d packages in App-Ops Manager",
|
||||
len(self.results))
|
||||
71 mvt/android/modules/bugreport/base.py Normal file
@@ -0,0 +1,71 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# See the file 'LICENSE' for usage and copying permissions, or find a copy at
# https://github.com/mvt-project/mvt/blob/main/LICENSE

import fnmatch
import logging
import os
from zipfile import ZipFile

from mvt.common.module import MVTModule

log = logging.getLogger(__name__)


class BugReportModule(MVTModule):
"""This class provides a base for all Android Bug Report modules."""

zip_archive = None

def from_folder(self, extract_path: str, extract_files: str) -> None:
self.extract_path = extract_path
self.extract_files = extract_files

def from_zip(self, zip_archive: ZipFile, zip_files: list) -> None:
self.zip_archive = zip_archive
self.zip_files = zip_files

def _get_files_by_pattern(self, pattern: str) -> list:
file_names = []
if self.zip_archive:
for zip_file in self.zip_files:
file_names.append(zip_file)
else:
file_names = self.extract_files

return fnmatch.filter(file_names, pattern)

def _get_files_by_patterns(self, patterns: list) -> list:
for pattern in patterns:
matches = self._get_files_by_pattern(pattern)
if matches:
return matches

def _get_file_content(self, file_path: str) -> bytes:
if self.zip_archive:
handle = self.zip_archive.open(file_path)
else:
handle = open(os.path.join(self.extract_path, file_path), "rb")

data = handle.read()
handle.close()

return data

def _get_dumpstate_file(self) -> bytes:
main = self._get_files_by_pattern("main_entry.txt")
if main:
main_content = self._get_file_content(main[0])
try:
return self._get_file_content(main_content.decode().strip())
except KeyError:
return None
else:
dumpstate_logs = self._get_files_by_pattern("dumpState_*.log")
if not dumpstate_logs:
return None

return self._get_file_content(dumpstate_logs[0])

return None
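A rough standalone equivalent of _get_dumpstate_file() above for a bug report zip sitting on disk: newer bug reports ship a main_entry.txt naming the dumpstate file, while older ones just contain a dumpState_*.log. The zip path below is a placeholder.

```python
import fnmatch
from zipfile import ZipFile

def read_dumpstate(zip_path: str) -> bytes:
    with ZipFile(zip_path) as archive:
        names = archive.namelist()
        if "main_entry.txt" in names:
            # main_entry.txt contains the name of the dumpstate file.
            main_entry = archive.read("main_entry.txt").decode().strip()
            try:
                return archive.read(main_entry)
            except KeyError:
                return b""
        candidates = fnmatch.filter(names, "dumpState_*.log")
        if candidates:
            return archive.read(candidates[0])
        return b""

# content = read_dumpstate("bugreport-example.zip")  # hypothetical archive name
```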
77 mvt/android/modules/bugreport/battery_daily.py Normal file
@@ -0,0 +1,77 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_daily
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BatteryDaily(BugReportModule):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record: dict) -> None:
|
||||
return {
|
||||
"timestamp": record["from"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "battery_daily",
|
||||
"data": f"Recorded update of package {record['package_name']} with vers {record['vers']}"
|
||||
}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_batterystats = False
|
||||
in_daily = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE batterystats:":
|
||||
in_batterystats = True
|
||||
continue
|
||||
|
||||
if not in_batterystats:
|
||||
continue
|
||||
|
||||
if line.strip() == "Daily stats:":
|
||||
lines.append(line)
|
||||
in_daily = True
|
||||
continue
|
||||
|
||||
if not in_daily:
|
||||
continue
|
||||
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_battery_daily("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted a total of %d battery daily stats",
|
||||
len(self.results))
|
||||
61 mvt/android/modules/bugreport/battery_history.py Normal file
@@ -0,0 +1,61 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_history
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BatteryHistory(BugReportModule):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_history = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip().startswith("Battery History "):
|
||||
lines.append(line)
|
||||
in_history = True
|
||||
continue
|
||||
|
||||
if not in_history:
|
||||
continue
|
||||
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_battery_history("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted a total of %d battery history records",
|
||||
len(self.results))
|
||||
64 mvt/android/modules/bugreport/dbinfo.py Normal file
@@ -0,0 +1,64 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_dbinfo
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DBInfo(BugReportModule):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
slug = "dbinfo"
|
||||
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
path = result.get("path", "")
|
||||
for part in path.split("/"):
|
||||
ioc = self.indicators.check_app_id(part)
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
in_dbinfo = False
|
||||
lines = []
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE dbinfo:":
|
||||
in_dbinfo = True
|
||||
continue
|
||||
|
||||
if not in_dbinfo:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_dbinfo("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted a total of %d database connection pool records",
|
||||
len(self.results))
|
||||
59 mvt/android/modules/bugreport/getprop.py Normal file
@@ -0,0 +1,59 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
from datetime import datetime, timedelta

from mvt.android.parsers import parse_getprop

from .base import BugReportModule

log = logging.getLogger(__name__)


class Getprop(BugReportModule):
"""This module extracts device properties from getprop command."""

def __init__(self, file_path: str = None, target_path: str = None,
results_path: str = None, fast_mode: bool = False,
log: logging.Logger = None, results: list = []) -> None:
super().__init__(file_path=file_path, target_path=target_path,
results_path=results_path, fast_mode=fast_mode,
log=log, results=results)

self.results = {} if not results else results

def run(self) -> None:
content = self._get_dumpstate_file()
if not content:
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
return

lines = []
in_getprop = False
for line in content.decode(errors="ignore").splitlines():
if line.strip() == "------ SYSTEM PROPERTIES (getprop) ------":
in_getprop = True
continue

if not in_getprop:
continue

if line.strip() == "------":
break

lines.append(line)

self.results = parse_getprop("\n".join(lines))

# Alert if phone is outdated.
security_patch = self.results.get("ro.build.version.security_patch", "")
if security_patch:
patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
if (datetime.now() - patch_date) > timedelta(days=6*30):
self.log.warning("This phone has not received security updates for more than "
"six months (last update: %s)", security_patch)

self.log.info("Extracted %d Android system properties", len(self.results))
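The staleness check above in isolation: parse ro.build.version.security_patch and flag it when it is older than roughly six months (6 * 30 days). The dates in the usage lines are example values.

```python
from datetime import datetime, timedelta

def patch_is_stale(security_patch: str, now: datetime = None) -> bool:
    patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
    now = now or datetime.now()
    return (now - patch_date) > timedelta(days=6 * 30)

print(patch_is_stale("2021-05-01", now=datetime(2022, 1, 15)))   # True, ~259 days old
print(patch_is_stale("2021-12-01", now=datetime(2022, 1, 15)))   # False, ~45 days old
```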
188 mvt/android/modules/bugreport/packages.py Normal file
@@ -0,0 +1,188 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from mvt.android.modules.adb.packages import (DANGEROUS_PERMISSIONS,
|
||||
DANGEROUS_PERMISSIONS_THRESHOLD,
|
||||
ROOT_PACKAGES)
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Packages(BugReportModule):
|
||||
"""This module extracts details on receivers for risky activities."""

    def __init__(self, file_path: str = None, target_path: str = None,
                 results_path: str = None, fast_mode: bool = False,
                 log: logging.Logger = None, results: list = []) -> None:
        super().__init__(file_path=file_path, target_path=target_path,
                         results_path=results_path, fast_mode=fast_mode,
                         log=log, results=results)

    def serialize(self, record: dict) -> None:
        records = []

        timestamps = [
            {"event": "package_install", "timestamp": record["timestamp"]},
            {"event": "package_first_install", "timestamp": record["first_install_time"]},
            {"event": "package_last_update", "timestamp": record["last_update_time"]},
        ]

        for ts in timestamps:
            records.append({
                "timestamp": ts["timestamp"],
                "module": self.__class__.__name__,
                "event": ts["event"],
                "data": f"Install or update of package {record['package_name']}",
            })

        return records

    def check_indicators(self) -> None:
        for result in self.results:
            if result["package_name"] in ROOT_PACKAGES:
                self.log.warning("Found an installed package related to rooting/jailbreaking: \"%s\"",
                                 result["package_name"])
                self.detected.append(result)
                continue

            if not self.indicators:
                continue

            ioc = self.indicators.check_app_id(result.get("package_name"))
            if ioc:
                result["matched_indicator"] = ioc
                self.detected.append(result)
                continue

    @staticmethod
    def parse_package_for_details(output: str) -> dict:
        details = {
            "uid": "",
            "version_name": "",
            "version_code": "",
            "timestamp": "",
            "first_install_time": "",
            "last_update_time": "",
            "requested_permissions": [],
        }

        in_install_permissions = False
        in_runtime_permissions = False
        for line in output.splitlines():
            if in_install_permissions:
                if line.startswith(" " * 4) and not line.startswith(" " * 6):
                    in_install_permissions = False
                    continue

                permission = line.strip().split(":")[0]
                if permission not in details["requested_permissions"]:
                    details["requested_permissions"].append(permission)

            if in_runtime_permissions:
                if not line.startswith(" " * 8):
                    in_runtime_permissions = False
                    continue

                permission = line.strip().split(":")[0]
                if permission not in details["requested_permissions"]:
                    details["requested_permissions"].append(permission)

            if line.strip().startswith("userId="):
                details["uid"] = line.split("=")[1].strip()
            elif line.strip().startswith("versionName="):
                details["version_name"] = line.split("=")[1].strip()
            elif line.strip().startswith("versionCode="):
                details["version_code"] = line.split("=", 1)[1].strip()
            elif line.strip().startswith("timeStamp="):
                details["timestamp"] = line.split("=")[1].strip()
            elif line.strip().startswith("firstInstallTime="):
                details["first_install_time"] = line.split("=")[1].strip()
            elif line.strip().startswith("lastUpdateTime="):
                details["last_update_time"] = line.split("=")[1].strip()
            elif line.strip() == "install permissions:":
                in_install_permissions = True
            elif line.strip() == "runtime permissions:":
                in_runtime_permissions = True

        return details

    def parse_packages_list(self, output: str) -> list:
        pkg_rxp = re.compile(r" Package \[(.+?)\].*")

        results = []
        package_name = None
        package = {}
        lines = []
        for line in output.splitlines():
            if line.startswith(" Package ["):
                if len(lines) > 0:
                    details = self.parse_package_for_details("\n".join(lines))
                    package.update(details)
                    results.append(package)
                    lines = []
                    package = {}

                matches = pkg_rxp.findall(line)
                if not matches:
                    continue

                package_name = matches[0]
                package["package_name"] = package_name
                continue

            if not package_name:
                continue

            lines.append(line)

        return results

    def run(self) -> None:
        content = self._get_dumpstate_file()
        if not content:
            self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
            return

        in_package = False
        in_packages_list = False
        lines = []
        for line in content.decode(errors="ignore").splitlines():
            if line.strip() == "DUMP OF SERVICE package:":
                in_package = True
                continue

            if not in_package:
                continue

            if line.strip() == "Packages:":
                in_packages_list = True
                continue

            if not in_packages_list:
                continue

            if line.strip() == "":
                break

            lines.append(line)

        self.results = self.parse_packages_list("\n".join(lines))

        for result in self.results:
            dangerous_permissions_count = 0
            for perm in result["requested_permissions"]:
                if perm in DANGEROUS_PERMISSIONS:
                    dangerous_permissions_count += 1

            if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD:
                self.log.info("Found package \"%s\" requested %d potentially dangerous permissions",
                              result["package_name"], dangerous_permissions_count)

        self.log.info("Extracted details on %d packages", len(self.results))
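A minimal usage sketch (editor's illustration, not part of this diff) of how parse_package_for_details() consumes a single package block from "dumpsys package" style output. The sample text and the assumption that the enclosing class is the bug report Packages module are illustrative only.

# Editor's sketch: hypothetical dumpsys-style package block.
sample_block = """    userId=10012
    versionName=1.2.3
    firstInstallTime=2021-11-01 09:00:00
    lastUpdateTime=2021-12-01 10:00:00
    install permissions:
      android.permission.READ_SMS: granted=true
      android.permission.CAMERA: granted=true
"""

details = Packages.parse_package_for_details(sample_block)
# details["uid"] == "10012"; details["requested_permissions"] lists the
# permission names found under "install permissions:".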
mvt/android/modules/bugreport/receivers.py (new file, 84 lines)
@@ -0,0 +1,84 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

from mvt.android.parsers import parse_dumpsys_receiver_resolver_table

from .base import BugReportModule

log = logging.getLogger(__name__)

INTENT_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
INTENT_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
INTENT_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
INTENT_PHONE_STATE = "android.intent.action.PHONE_STATE"
INTENT_NEW_OUTGOING_CALL = "android.intent.action.NEW_OUTGOING_CALL"


class Receivers(BugReportModule):
    """This module extracts details on receivers for risky activities."""

    def __init__(self, file_path: str = None, target_path: str = None,
                 results_path: str = None, fast_mode: bool = False,
                 log: logging.Logger = None, results: list = []) -> None:
        super().__init__(file_path=file_path, target_path=target_path,
                         results_path=results_path, fast_mode=fast_mode,
                         log=log, results=results)

        self.results = results if results else {}

    def check_indicators(self) -> None:
        if not self.indicators:
            return

        for intent, receivers in self.results.items():
            for receiver in receivers:
                if intent == INTENT_NEW_OUTGOING_SMS:
                    self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
                                  receiver["receiver"])
                elif intent == INTENT_SMS_RECEIVED:
                    self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
                                  receiver["receiver"])
                elif intent == INTENT_DATA_SMS_RECEIVED:
                    self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
                                  receiver["receiver"])
                elif intent == INTENT_PHONE_STATE:
                    self.log.info("Found a receiver monitoring telephony state/incoming calls: \"%s\"",
                                  receiver["receiver"])
                elif intent == INTENT_NEW_OUTGOING_CALL:
                    self.log.info("Found a receiver monitoring outgoing calls: \"%s\"",
                                  receiver["receiver"])

                ioc = self.indicators.check_app_id(receiver["package_name"])
                if ioc:
                    receiver["matched_indicator"] = ioc
                    self.detected.append({intent: receiver})
                    continue

    def run(self) -> None:
        content = self._get_dumpstate_file()
        if not content:
            self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
            return

        in_receivers = False
        lines = []
        for line in content.decode(errors="ignore").splitlines():
            if line.strip() == "DUMP OF SERVICE package:":
                in_receivers = True
                continue

            if not in_receivers:
                continue

            if line.strip().startswith("------------------------------------------------------------------------------"):
                break

            lines.append(line)

        self.results = parse_dumpsys_receiver_resolver_table("\n".join(lines))

        self.log.info("Extracted receivers for %d intents", len(self.results))
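The run() method above stores the receiver resolver table keyed by intent action, which is the shape check_indicators() iterates over. A hedged illustration (package and receiver names are invented):

# Editor's sketch: the shape of self.results in the Receivers module.
results = {
    "android.provider.Telephony.SMS_RECEIVED": [
        {"package_name": "com.example.app",
         "receiver": "com.example.app/.SmsReceiver"},
    ],
}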
mvt/android/parsers/__init__.py (new file, 11 lines)
@@ -0,0 +1,11 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from .dumpsys import (parse_dumpsys_accessibility,
                      parse_dumpsys_activity_resolver_table,
                      parse_dumpsys_appops, parse_dumpsys_battery_daily,
                      parse_dumpsys_battery_history, parse_dumpsys_dbinfo,
                      parse_dumpsys_receiver_resolver_table)
from .getprop import parse_getprop
mvt/android/parsers/backup.py (new file, 211 lines)
@@ -0,0 +1,211 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import datetime
import io
import json
import tarfile
import zlib

from cryptography.hazmat.primitives import hashes, padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

from mvt.common.utils import check_for_links, convert_timestamp_to_iso

PBKDF2_KEY_SIZE = 32


class AndroidBackupParsingError(Exception):
    """Exception raised while parsing an Android backup file."""


class AndroidBackupNotImplemented(AndroidBackupParsingError):
    pass


class InvalidBackupPassword(AndroidBackupParsingError):
    pass


def to_utf8_bytes(input_bytes):
    output = []
    for byte in input_bytes:
        if byte < ord(b'\x80'):
            output.append(byte)
        else:
            output.append(ord('\xef') | (byte >> 12))
            output.append(ord('\xbc') | ((byte >> 6) & ord('\x3f')))
            output.append(ord('\x80') | (byte & ord('\x3f')))
    return bytes(output)


def parse_ab_header(data):
    """
    Parse the header of an Android Backup file.
    Returns a dict {'backup': True, 'compression': False,
                    'encryption': "none", 'version': 4}
    """
    if data.startswith(b"ANDROID BACKUP"):
        [magic_header, version, is_compressed, encryption, tar_data] = data.split(b"\n", 4)
        return {
            "backup": True,
            "compression": (is_compressed == b"1"),
            "version": int(version),
            "encryption": encryption.decode("utf-8")
        }

    return {
        "backup": False,
        "compression": None,
        "version": None,
        "encryption": None
    }


def decrypt_master_key(password, user_salt, user_iv, pbkdf2_rounds, master_key_blob, format_version, checksum_salt):
    """Generate the AES key from the user password using PBKDF2.

    The backup master key is extracted from the master key blob after decryption.
    """
    # Derive key from password using PBKDF2.
    kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=user_salt, iterations=pbkdf2_rounds)
    key = kdf.derive(password.encode("utf-8"))

    # Decrypt master key blob.
    cipher = Cipher(algorithms.AES(key), modes.CBC(user_iv))
    decryptor = cipher.decryptor()
    try:
        decrypted_master_key_blob = decryptor.update(master_key_blob) + decryptor.finalize()

        # Extract key and IV from decrypted blob.
        key_blob = io.BytesIO(decrypted_master_key_blob)
        master_iv_length = ord(key_blob.read(1))
        master_iv = key_blob.read(master_iv_length)

        master_key_length = ord(key_blob.read(1))
        master_key = key_blob.read(master_key_length)

        master_key_checksum_length = ord(key_blob.read(1))
        master_key_checksum = key_blob.read(master_key_checksum_length)
    except TypeError:
        raise InvalidBackupPassword()

    # Handle quirky encoding of master key bytes in Android original Java crypto code.
    if format_version > 1:
        hmac_mk = to_utf8_bytes(master_key)
    else:
        hmac_mk = master_key

    # Derive checksum to confirm successful backup decryption.
    kdf = PBKDF2HMAC(algorithm=hashes.SHA1(), length=32, salt=checksum_salt, iterations=pbkdf2_rounds)
    calculated_checksum = kdf.derive(hmac_mk)

    if master_key_checksum != calculated_checksum:
        raise InvalidBackupPassword()

    return master_key, master_iv


def decrypt_backup_data(encrypted_backup, password, encryption_algo, format_version):
    """
    Generate the encryption key from the password and decrypt the backup data.
    """
    if encryption_algo != b"AES-256":
        raise AndroidBackupNotImplemented("Encryption Algorithm not implemented")

    if password is None:
        raise InvalidBackupPassword()

    [user_salt, checksum_salt, pbkdf2_rounds, user_iv, master_key_blob, encrypted_data] = encrypted_backup.split(b"\n", 5)
    user_salt = bytes.fromhex(user_salt.decode("utf-8"))
    checksum_salt = bytes.fromhex(checksum_salt.decode("utf-8"))
    pbkdf2_rounds = int(pbkdf2_rounds)
    user_iv = bytes.fromhex(user_iv.decode("utf-8"))
    master_key_blob = bytes.fromhex(master_key_blob.decode("utf-8"))

    # Derive decryption master key from password.
    master_key, master_iv = decrypt_master_key(password=password, user_salt=user_salt, user_iv=user_iv,
                                               pbkdf2_rounds=pbkdf2_rounds, master_key_blob=master_key_blob,
                                               format_version=format_version, checksum_salt=checksum_salt)

    # Decrypt and unpad backup data using derived key.
    cipher = Cipher(algorithms.AES(master_key), modes.CBC(master_iv))
    decryptor = cipher.decryptor()
    decrypted_tar = decryptor.update(encrypted_data) + decryptor.finalize()

    unpadder = padding.PKCS7(128).unpadder()
    return unpadder.update(decrypted_tar)


def parse_backup_file(data, password=None):
    """
    Parse an .ab file and return the tar data it contains.
    """
    if not data.startswith(b"ANDROID BACKUP"):
        raise AndroidBackupParsingError("Invalid file header")

    [magic_header, version, is_compressed, encryption_algo, tar_data] = data.split(b"\n", 4)
    version = int(version)
    is_compressed = int(is_compressed)

    if encryption_algo != b"none":
        tar_data = decrypt_backup_data(tar_data, password, encryption_algo, format_version=version)

    if is_compressed:
        try:
            tar_data = zlib.decompress(tar_data)
        except zlib.error:
            raise AndroidBackupParsingError("Impossible to decompress the backup file")

    return tar_data


def parse_tar_for_sms(data):
    """
    Extract SMS messages from a tar backup archive.
    Returns a list of SMS entries.
    """
    dbytes = io.BytesIO(data)
    tar = tarfile.open(fileobj=dbytes)
    res = []
    for member in tar.getmembers():
        if member.name.startswith("apps/com.android.providers.telephony/d_f/") and \
                (member.name.endswith("_sms_backup") or member.name.endswith("_mms_backup")):
            dhandler = tar.extractfile(member)
            res.extend(parse_sms_file(dhandler.read()))

    return res


def parse_sms_file(data):
    """
    Parse an SMS or MMS file extracted from a backup.
    Returns a list of SMS or MMS entries.
    """
    res = []
    data = zlib.decompress(data)
    json_data = json.loads(data)

    for entry in json_data:
        # Adapt MMS format to SMS format.
        if "mms_body" in entry:
            entry["body"] = entry["mms_body"]
            entry.pop("mms_body")

        message_links = check_for_links(entry["body"])

        utc_timestamp = datetime.datetime.utcfromtimestamp(int(entry["date"]) / 1000)
        entry["isodate"] = convert_timestamp_to_iso(utc_timestamp)
        entry["direction"] = ("sent" if int(entry["date_sent"]) else "received")

        # If we find links in the messages or if they are empty we add them to the list.
        if message_links or entry["body"].strip() == "":
            entry["links"] = message_links
            res.append(entry)

    return res
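A hedged sketch (editor's illustration, not part of this diff) of how the helpers above might be combined to pull SMS records out of an adb backup file; the file name and password are placeholders.

# Editor's sketch: reading SMS/MMS entries from an Android backup file.
from mvt.android.parsers.backup import parse_ab_header, parse_backup_file, parse_tar_for_sms

with open("backup.ab", "rb") as handle:
    data = handle.read()

header = parse_ab_header(data)
if header["backup"]:
    # Password is only needed when header["encryption"] is not "none".
    tar_data = parse_backup_file(data, password="example-password")
    for message in parse_tar_for_sms(tar_data):
        print(message["isodate"], message.get("links", []))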
mvt/android/parsers/dumpsys.py (new file, 375 lines)
@@ -0,0 +1,375 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import re
from datetime import datetime

from mvt.common.utils import convert_timestamp_to_iso


def parse_dumpsys_accessibility(output: str) -> list:
    results = []

    in_services = False
    for line in output.splitlines():
        if line.strip().startswith("installed services:"):
            in_services = True
            continue

        if not in_services:
            continue

        if line.strip() == "}":
            break

        service = line.split(":")[1].strip()

        results.append({
            "package_name": service.split("/")[0],
            "service": service,
        })

    return results


def parse_dumpsys_activity_resolver_table(output: str) -> dict:
    results = {}

    in_activity_resolver_table = False
    in_non_data_actions = False
    intent = None
    for line in output.splitlines():
        if line.startswith("Activity Resolver Table:"):
            in_activity_resolver_table = True
            continue

        if not in_activity_resolver_table:
            continue

        if line.startswith(" Non-Data Actions:"):
            in_non_data_actions = True
            continue

        if not in_non_data_actions:
            continue

        # If we hit an empty line, the Non-Data Actions section should be
        # finished.
        if line.strip() == "":
            break

        # We detect the action name.
        if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line:
            intent = line.strip().replace(":", "")
            results[intent] = []
            continue

        # If we are not in an intent block yet, skip.
        if not intent:
            continue

        # If we are in a block but the line does not start with 8 spaces
        # it means the block ended and a new one started, so we reset and
        # continue.
        if not line.startswith(" " * 8):
            intent = None
            continue

        # If we got this far, we are processing receivers for the
        # activities we are interested in.
        activity = line.strip().split(" ")[1]
        package_name = activity.split("/")[0]

        results[intent].append({
            "package_name": package_name,
            "activity": activity,
        })

    return results


def parse_dumpsys_battery_daily(output: str) -> list:
    results = []
    daily = None
    daily_updates = []
    for line in output.splitlines():
        if line.startswith(" Daily from "):
            if len(daily_updates) > 0:
                results.extend(daily_updates)
                daily_updates = []

            timeframe = line[13:].strip()
            date_from, date_to = timeframe.strip(":").split(" to ", 1)
            daily = {"from": date_from[0:10], "to": date_to[0:10]}
            continue

        if not daily:
            continue

        if not line.strip().startswith("Update "):
            continue

        line = line.strip().replace("Update ", "")
        package_name, vers = line.split(" ", 1)
        vers_nr = vers.split("=", 1)[1]

        already_seen = False
        for update in daily_updates:
            if package_name == update["package_name"] and vers_nr == update["vers"]:
                already_seen = True
                break

        if not already_seen:
            daily_updates.append({
                "action": "update",
                "from": daily["from"],
                "to": daily["to"],
                "package_name": package_name,
                "vers": vers_nr,
            })

    if len(daily_updates) > 0:
        results.extend(daily_updates)

    return results


def parse_dumpsys_battery_history(output: str) -> list:
    results = []

    for line in output.splitlines():
        if line.startswith("Battery History "):
            continue

        if line.strip() == "":
            break

        time_elapsed = line.strip().split(" ", 1)[0]

        event = ""
        if line.find("+job") > 0:
            event = "start_job"
            uid = line[line.find("+job")+5:line.find(":")]
            service = line[line.find(":")+1:].strip('"')
            package_name = service.split("/")[0]
        elif line.find("-job") > 0:
            event = "end_job"
            uid = line[line.find("-job")+5:line.find(":")]
            service = line[line.find(":")+1:].strip('"')
            package_name = service.split("/")[0]
        elif line.find("+running +wake_lock=") > 0:
            uid = line[line.find("+running +wake_lock=")+21:line.find(":")]
            event = "wake"
            service = line[line.find("*walarm*:")+9:].split(" ")[0].strip('"').strip()
            if service == "" or "/" not in service:
                continue

            package_name = service.split("/")[0]
        else:
            continue

        results.append({
            "time_elapsed": time_elapsed,
            "event": event,
            "uid": uid,
            "package_name": package_name,
            "service": service,
        })

    return results


def parse_dumpsys_dbinfo(output: str) -> list:
    results = []

    rxp = re.compile(r'.*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\].*\[Pid:\((\d+)\)\](\w+).*sql\=\"(.+?)\"')
    rxp_no_pid = re.compile(r'.*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\][ ]{1}(\w+).*sql\=\"(.+?)\"')

    pool = None
    in_operations = False
    for line in output.splitlines():
        if line.startswith("Connection pool for "):
            pool = line.replace("Connection pool for ", "").rstrip(":")

        if not pool:
            continue

        if line.strip() == "Most recently executed operations:":
            in_operations = True
            continue

        if not in_operations:
            continue

        if not line.startswith(" "):
            in_operations = False
            pool = None
            continue

        matches = rxp.findall(line)
        if not matches:
            matches = rxp_no_pid.findall(line)
            if not matches:
                continue
            else:
                match = matches[0]
                results.append({
                    "isodate": match[0],
                    "action": match[1],
                    "sql": match[2],
                    "path": pool,
                })
        else:
            match = matches[0]
            results.append({
                "isodate": match[0],
                "pid": match[1],
                "action": match[2],
                "sql": match[3],
                "path": pool,
            })

    return results


def parse_dumpsys_receiver_resolver_table(output: str) -> dict:
    results = {}

    in_receiver_resolver_table = False
    in_non_data_actions = False
    intent = None
    for line in output.splitlines():
        if line.startswith("Receiver Resolver Table:"):
            in_receiver_resolver_table = True
            continue

        if not in_receiver_resolver_table:
            continue

        if line.startswith(" Non-Data Actions:"):
            in_non_data_actions = True
            continue

        if not in_non_data_actions:
            continue

        # If we hit an empty line, the Non-Data Actions section should be
        # finished.
        if line.strip() == "":
            break

        # We detect the action name.
        if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line:
            intent = line.strip().replace(":", "")
            results[intent] = []
            continue

        # If we are not in an intent block yet, skip.
        if not intent:
            continue

        # If we are in a block but the line does not start with 8 spaces
        # it means the block ended and a new one started, so we reset and
        # continue.
        if not line.startswith(" " * 8):
            intent = None
            continue

        # If we got this far, we are processing receivers for the
        # activities we are interested in.
        receiver = line.strip().split(" ")[1]
        package_name = receiver.split("/")[0]

        results[intent].append({
            "package_name": package_name,
            "receiver": receiver,
        })

    return results


def parse_dumpsys_appops(output: str) -> list:
    results = []
    perm = {}
    package = {}
    entry = {}
    uid = None
    in_packages = False

    for line in output.splitlines():
        if line.startswith(" Uid 0:"):
            in_packages = True

        if not in_packages:
            continue

        if line.startswith(" Uid "):
            uid = line[6:-1]
            continue

        if line.startswith(" Package "):
            if entry:
                perm["entries"].append(entry)
                entry = {}

            if package:
                if perm:
                    package["permissions"].append(perm)

                perm = {}
                results.append(package)

            package = {
                "package_name": line[12:-1],
                "permissions": [],
                "uid": uid,
            }
            continue

        if line.startswith(" ") and line[6] != " ":
            if entry:
                perm["entries"].append(entry)
                entry = {}
            if perm:
                package["permissions"].append(perm)
                perm = {}

            perm["name"] = line.split()[0]
            perm["entries"] = []
            if len(line.split()) > 1:
                perm["access"] = line.split()[1][1:-2]

            continue

        if line.startswith(" "):
            # Permission entry like:
            # Reject: [fg-s]2021-05-19 22:02:52.054 (-314d1h25m2s33ms)
            if entry:
                perm["entries"].append(entry)
                entry = {}

            entry["access"] = line.split(":")[0].strip()
            entry["type"] = line[line.find("[")+1:line.find("]")]

            try:
                entry["timestamp"] = convert_timestamp_to_iso(
                    datetime.strptime(
                        line[line.find("]")+1:line.find("(")].strip(),
                        "%Y-%m-%d %H:%M:%S.%f"))
            except ValueError:
                # Invalid date format
                pass

        if line.strip() == "":
            break

    if entry:
        perm["entries"].append(entry)
    if perm:
        package["permissions"].append(perm)
    if package:
        results.append(package)

    return results
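For orientation, a hedged illustration (editor's note, not part of this diff) of the structure produced by parse_dumpsys_appops(); package names, permission names, and values are invented.

# Editor's sketch: example of the list returned by parse_dumpsys_appops().
[
    {
        "package_name": "com.example.app",
        "uid": "0",
        "permissions": [
            {
                "name": "REQUEST_INSTALL_PACKAGES",
                "access": "allow",
                "entries": [
                    {"access": "Reject", "type": "fg-s",
                     "timestamp": "2021-05-19 22:02:52.054000"},
                ],
            },
        ],
    },
]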
mvt/android/parsers/getprop.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import re


def parse_getprop(output: str) -> dict:
    results = {}
    rxp = re.compile(r"\[(.+?)\]: \[(.+?)\]")

    for line in output.splitlines():
        line = line.strip()
        if line == "":
            continue

        matches = re.findall(rxp, line)
        if not matches or len(matches[0]) != 2:
            continue

        key = matches[0][0]
        value = matches[0][1]
        results[key] = value

    return results
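A tiny hedged example (editor's illustration, not part of this diff) of parse_getprop() on `adb shell getprop` style output; the sample property lines are invented.

# Editor's sketch: parsing sample getprop output.
from mvt.android.parsers import parse_getprop

sample = """[ro.build.version.release]: [12]
[ro.product.model]: [Example Phone]
"""
props = parse_getprop(sample)
# props == {"ro.build.version.release": "12", "ro.product.model": "Example Phone"}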
@@ -1,4 +1,4 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

mvt/common/cmd_check_iocs.py (new file, 64 lines)
@@ -0,0 +1,64 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
import os

from mvt.common.command import Command

log = logging.getLogger(__name__)


class CmdCheckIOCS(Command):

    name = "check-iocs"
    modules = []

    def __init__(self, target_path: str = None, results_path: str = None,
                 ioc_files: list = [], module_name: str = None, serial: str = None,
                 fast_mode: bool = False):
        super().__init__(target_path=target_path, results_path=results_path,
                         ioc_files=ioc_files, module_name=module_name,
                         serial=serial, fast_mode=fast_mode, log=log)

    def run(self) -> None:
        all_modules = []
        for entry in self.modules:
            if entry not in all_modules:
                all_modules.append(entry)

        log.info("Checking stored results against provided indicators...")

        total_detections = 0
        for file_name in os.listdir(self.target_path):
            name_only, ext = os.path.splitext(file_name)
            file_path = os.path.join(self.target_path, file_name)

            for iocs_module in all_modules:
                if self.module_name and iocs_module.__name__ != self.module_name:
                    continue

                if iocs_module().get_slug() != name_only:
                    continue

                log.info("Loading results from \"%s\" with module %s", file_name,
                         iocs_module.__name__)

                m = iocs_module.from_json(file_path,
                                          log=logging.getLogger(iocs_module.__module__))
                if self.iocs.total_ioc_count > 0:
                    m.indicators = self.iocs
                    m.indicators.log = m.log

                try:
                    m.check_indicators()
                except NotImplementedError:
                    continue
                else:
                    total_detections += len(m.detected)

        if total_detections > 0:
            log.warning("The check of the results produced %d detections!",
                        total_detections)
mvt/common/command.py (new file, 186 lines)
@@ -0,0 +1,186 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import hashlib
import json
import logging
import os
import sys
from datetime import datetime
from typing import Callable

from mvt.common.indicators import Indicators
from mvt.common.module import run_module, save_timeline
from mvt.common.utils import convert_timestamp_to_iso
from mvt.common.version import MVT_VERSION


class Command(object):

    def __init__(self, target_path: str = None, results_path: str = None,
                 ioc_files: list = [], module_name: str = None, serial: str = None,
                 fast_mode: bool = False,
                 log: logging.Logger = logging.getLogger(__name__)):
        self.name = ""

        self.target_path = target_path
        self.results_path = results_path
        self.ioc_files = ioc_files
        self.module_name = module_name
        self.serial = serial
        self.fast_mode = fast_mode
        self.log = log

        self.iocs = Indicators(log=log)
        self.iocs.load_indicators_files(ioc_files)

        self.timeline = []
        self.timeline_detected = []

    def _create_storage(self) -> None:
        if self.results_path and not os.path.exists(self.results_path):
            try:
                os.makedirs(self.results_path)
            except Exception as e:
                self.log.critical("Unable to create output folder %s: %s",
                                  self.results_path, e)
                sys.exit(1)

    def _add_log_file_handler(self, logger: logging.Logger) -> None:
        if not self.results_path:
            return

        fh = logging.FileHandler(os.path.join(self.results_path, "command.log"))
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        logger.addHandler(fh)

    def _store_timeline(self) -> None:
        if not self.results_path:
            return

        if len(self.timeline) > 0:
            save_timeline(self.timeline,
                          os.path.join(self.results_path, "timeline.csv"))

        if len(self.timeline_detected) > 0:
            save_timeline(self.timeline_detected,
                          os.path.join(self.results_path, "timeline_detected.csv"))

    def _store_info(self) -> None:
        if not self.results_path:
            return

        target_path = None
        if self.target_path:
            target_path = os.path.abspath(self.target_path)

        info = {
            "target_path": target_path,
            "mvt_version": MVT_VERSION,
            "date": convert_timestamp_to_iso(datetime.now()),
            "ioc_files": [],
            "hashes": [],
        }

        for coll in self.iocs.ioc_collections:
            info["ioc_files"].append(coll.get("stix2_file_path", ""))

        # TODO: Revisit if setting this from environment variable is good
        # enough.
        if self.target_path and os.environ.get("MVT_HASH_FILES"):
            if os.path.isfile(self.target_path):
                h = hashlib.sha256()
                with open(self.target_path, "rb") as handle:
                    h.update(handle.read())

                info["hashes"].append({
                    "file_path": self.target_path,
                    "sha256": h.hexdigest(),
                })
            elif os.path.isdir(self.target_path):
                for (root, dirs, files) in os.walk(self.target_path):
                    for file in files:
                        file_path = os.path.join(root, file)
                        h = hashlib.sha256()

                        try:
                            with open(file_path, "rb") as handle:
                                h.update(handle.read())
                        except FileNotFoundError:
                            self.log.error("Failed to hash the file %s: might be a symlink", file_path)
                            continue
                        except PermissionError:
                            self.log.error("Failed to hash the file %s: permission denied", file_path)
                            continue

                        info["hashes"].append({
                            "file_path": file_path,
                            "sha256": h.hexdigest(),
                        })

        with open(os.path.join(self.results_path, "info.json"), "w+") as handle:
            json.dump(info, handle, indent=4)

    def list_modules(self) -> None:
        self.log.info("Following is the list of available %s modules:", self.name)
        for module in self.modules:
            self.log.info(" - %s", module.__name__)

    def init(self) -> None:
        raise NotImplementedError

    def module_init(self, module: Callable) -> None:
        raise NotImplementedError

    def finish(self) -> None:
        raise NotImplementedError

    def run(self) -> None:
        self._create_storage()
        self._add_log_file_handler(self.log)

        try:
            self.init()
        except NotImplementedError:
            pass

        for module in self.modules:
            if self.module_name and module.__name__ != self.module_name:
                continue

            module_logger = logging.getLogger(module.__module__)
            self._add_log_file_handler(module_logger)

            m = module(target_path=self.target_path,
                       results_path=self.results_path,
                       fast_mode=self.fast_mode,
                       log=module_logger)

            if self.iocs.total_ioc_count:
                m.indicators = self.iocs
                m.indicators.log = m.log

            if self.serial:
                m.serial = self.serial

            try:
                self.module_init(m)
            except NotImplementedError:
                pass

            run_module(m)

            self.timeline.extend(m.timeline)
            self.timeline_detected.extend(m.timeline_detected)

        self._store_timeline()
        self._store_info()

        try:
            self.finish()
        except NotImplementedError:
            pass
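The Command base class leaves init(), module_init(), and finish() as optional hooks. A hedged sketch (editor's illustration, not part of this diff) of a minimal subclass; the class name, module list, and paths are placeholders.

# Editor's sketch: a minimal Command subclass.
from mvt.common.command import Command


class CmdExample(Command):

    name = "check-example"
    modules = []  # would normally list MVTModule subclasses to run

    def module_init(self, module) -> None:
        # Optional hook: configure each module instance before it runs.
        module.log.info("Starting module %s", module.__class__.__name__)


cmd = CmdExample(target_path="./target", results_path="./results", ioc_files=[])
cmd.run()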
@@ -1,5 +1,5 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

@@ -1,96 +1,225 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import json
import logging
import os

from appdirs import user_data_dir

from .url import URL

MVT_DATA_FOLDER = user_data_dir("mvt")
MVT_INDICATORS_FOLDER = os.path.join(MVT_DATA_FOLDER, "indicators")


class IndicatorsFileBadFormat(Exception):
    pass


class Indicators:
    """This class is used to parse indicators from a STIX2 file and provide
    functions to compare extracted artifacts to the indicators.

    """

    def __init__(self, log=None):
    def __init__(self, log=logging.Logger) -> None:
        self.log = log
        self.ioc_domains = []
        self.ioc_processes = []
        self.ioc_emails = []
        self.ioc_files = []
        self.ioc_files_sha256 = []
        self.ioc_app_ids = []
        self.ioc_count = 0
        self.ioc_collections = []
        self.total_ioc_count = 0

    def _add_indicator(self, ioc, iocs_list):
        if ioc not in iocs_list:
            iocs_list.append(ioc)
            self.ioc_count += 1
    def _load_downloaded_indicators(self) -> None:
        if not os.path.isdir(MVT_INDICATORS_FOLDER):
            return

    def parse_stix2(self, file_path):
        for ioc_file_name in os.listdir(MVT_INDICATORS_FOLDER):
            if ioc_file_name.lower().endswith(".stix2"):
                self.parse_stix2(os.path.join(MVT_INDICATORS_FOLDER, ioc_file_name))

    def _check_stix2_env_variable(self) -> None:
        """
        Checks if the environment variable MVT_STIX2 contains paths to STIX2 files.
        """
        if "MVT_STIX2" not in os.environ:
            return

        paths = os.environ["MVT_STIX2"].split(":")
        for path in paths:
            if os.path.isfile(path):
                self.parse_stix2(path)
            else:
                self.log.error("Path specified with env MVT_STIX2 is not a valid file: %s",
                               path)

    def _new_collection(self, cid: str = "", name: str = "", description: str = "",
                        file_name: str = "", file_path: str = "") -> dict:
        return {
            "id": cid,
            "name": name,
            "description": description,
            "stix2_file_name": file_name,
            "stix2_file_path": file_path,
            "domains": [],
            "processes": [],
            "emails": [],
            "file_names": [],
            "file_paths": [],
            "files_sha256": [],
            "app_ids": [],
            "ios_profile_ids": [],
            "count": 0,
        }

    def _add_indicator(self, ioc: str, ioc_coll: dict, ioc_coll_list: list) -> None:
        ioc = ioc.strip("'")
        if ioc not in ioc_coll_list:
            ioc_coll_list.append(ioc)
            ioc_coll["count"] += 1
            self.total_ioc_count += 1

    def parse_stix2(self, file_path: str) -> None:
        """Extract indicators from a STIX2 file.

        :param file_path: Path to the STIX2 file to parse
        :type file_path: str

        """
        self.log.info("Parsing STIX2 indicators file at path %s",
                      file_path)
        self.log.info("Parsing STIX2 indicators file at path %s", file_path)

        with open(file_path, "r") as handle:
        with open(file_path, "r", encoding="utf-8") as handle:
            try:
                data = json.load(handle)
            except json.decoder.JSONDecodeError:
                raise IndicatorsFileBadFormat("Unable to parse STIX2 indicators file, the file seems malformed or in the wrong format")
                self.log.critical("Unable to parse STIX2 indicator file. "
                                  "The file is corrupted or in the wrong format!")
                return

        malware = {}
        indicators = []
        relationships = []
        for entry in data.get("objects", []):
            if entry.get("type", "") != "indicator":
                continue
            entry_type = entry.get("type", "")
            if entry_type == "malware":
                malware[entry["id"]] = {
                    "name": entry["name"],
                    "description": entry.get("description", ""),
                }
            elif entry_type == "indicator":
                indicators.append(entry)
            elif entry_type == "relationship":
                relationships.append(entry)

            key, value = entry.get("pattern", "").strip("[]").split("=")
            value = value.strip("'")
        collections = []
        for mal_id, mal_values in malware.items():
            collection = self._new_collection(mal_id, mal_values.get("name"),
                                              mal_values.get("description"),
                                              os.path.basename(file_path),
                                              file_path)
            collections.append(collection)

            if key == "domain-name:value":
                # We force domain names to lower case.
                self._add_indicator(ioc=value.lower(),
                                    iocs_list=self.ioc_domains)
            elif key == "process:name":
                self._add_indicator(ioc=value,
                                    iocs_list=self.ioc_processes)
            elif key == "email-addr:value":
                # We force email addresses to lower case.
                self._add_indicator(ioc=value.lower(),
                                    iocs_list=self.ioc_emails)
            elif key == "file:name":
                self._add_indicator(ioc=value,
                                    iocs_list=self.ioc_files)
            elif key == "app:id":
                self._add_indicator(ioc=value,
                                    iocs_list=self.ioc_app_ids)
            elif key == "file:hashes.sha256":
                self._add_indicator(ioc=value,
                                    iocs_list=self.ioc_files_sha256)
        # We loop through all indicators.
        for indicator in indicators:
            malware_id = None

    def check_domain(self, url) -> bool:
            # We loop through all relationships and find the one pertinent to
            # the current indicator.
            for relationship in relationships:
                if relationship["source_ref"] != indicator["id"]:
                    continue

                # Look for a malware definition with the correct identifier.
                if relationship["target_ref"] in malware.keys():
                    malware_id = relationship["target_ref"]
                    break

            # Now we look for the correct collection matching the malware ID we
            # got from the relationship.
            for collection in collections:
                if collection["id"] != malware_id:
                    continue

                key, value = indicator.get("pattern", "").strip("[]").split("=")

                if key == "domain-name:value":
                    # We force domain names to lower case.
                    self._add_indicator(ioc=value.lower(),
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["domains"])
                elif key == "process:name":
                    self._add_indicator(ioc=value,
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["processes"])
                elif key == "email-addr:value":
                    # We force email addresses to lower case.
                    self._add_indicator(ioc=value.lower(),
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["emails"])
                elif key == "file:name":
                    self._add_indicator(ioc=value,
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["file_names"])
                elif key == "file:path":
                    self._add_indicator(ioc=value,
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["file_paths"])
                elif key == "file:hashes.sha256":
                    self._add_indicator(ioc=value,
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["files_sha256"])
                elif key == "app:id":
                    self._add_indicator(ioc=value,
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["app_ids"])
                elif key == "configuration-profile:id":
                    self._add_indicator(ioc=value,
                                        ioc_coll=collection,
                                        ioc_coll_list=collection["ios_profile_ids"])

                break

        for coll in collections:
            self.log.info("Extracted %d indicators for collection with name \"%s\"",
                          coll["count"], coll["name"])

        self.ioc_collections.extend(collections)

    def load_indicators_files(self, files: list, load_default: bool = True) -> None:
        """
        Load a list of indicators files.
        """
        for file_path in files:
            if os.path.isfile(file_path):
                self.parse_stix2(file_path)
            else:
                self.log.warning("No indicators file exists at path %s",
                                 file_path)

        # Load downloaded indicators and any indicators from env variable.
        if load_default:
            self._load_downloaded_indicators()

        self._check_stix2_env_variable()
        self.log.info("Loaded a total of %d unique indicators", self.total_ioc_count)

    def get_iocs(self, ioc_type: str) -> dict:
        for ioc_collection in self.ioc_collections:
            for ioc in ioc_collection.get(ioc_type, []):
                yield {
                    "value": ioc,
                    "type": ioc_type,
                    "name": ioc_collection["name"],
                    "stix2_file_name": ioc_collection["stix2_file_name"],
                }

    def check_domain(self, url: str) -> dict:
        """Check if a given URL matches any of the provided domain indicators.

        :param url: URL to match against domain indicators
        :type url: str
        :returns: True if the URL matched an indicator, otherwise False
        :rtype: bool
        :returns: Indicator details if matched, otherwise None

        """
        # TODO: If the IOC domain contains a subdomain, it is not currently
        # being matched.
        if not url:
            return False
            return None

        try:
            # First we use the provided URL.
@@ -115,138 +244,206 @@ class Indicators:
            else:
                # If it's not shortened, we just use the original URL object.
                final_url = orig_url
        except Exception as e:
        except Exception:
            # If URL parsing failed, we just try to do a simple substring
            # match.
            for ioc in self.ioc_domains:
                if ioc.lower() in url:
                    self.log.warning("Maybe found a known suspicious domain: %s", url)
                    return True
            for ioc in self.get_iocs("domains"):
                if ioc["value"].lower() in url:
                    self.log.warning("Maybe found a known suspicious domain %s matching indicators from \"%s\"",
                                     url, ioc["name"])
                    return ioc

            # If nothing matched, we can quit here.
            return False
            return None

        # If all parsing worked, we start walking through available domain indicators.
        for ioc in self.ioc_domains:
        for ioc in self.get_iocs("domains"):
            # First we check the full domain.
            if final_url.domain.lower() == ioc:
            if final_url.domain.lower() == ioc["value"]:
                if orig_url.is_shortened and orig_url.url != final_url.url:
                    self.log.warning("Found a known suspicious domain %s shortened as %s",
                                     final_url.url, orig_url.url)
                    self.log.warning("Found a known suspicious domain %s shortened as %s matching indicators from \"%s\"",
                                     final_url.url, orig_url.url, ioc["name"])
                else:
                    self.log.warning("Found a known suspicious domain: %s", final_url.url)
                    self.log.warning("Found a known suspicious domain %s matching indicators from \"%s\"",
                                     final_url.url, ioc["name"])

                return True
                return ioc

            # Then we just check the top level domain.
            if final_url.top_level.lower() == ioc:
            if final_url.top_level.lower() == ioc["value"]:
                if orig_url.is_shortened and orig_url.url != final_url.url:
                    self.log.warning("Found a sub-domain matching a known suspicious top level %s shortened as %s",
                                     final_url.url, orig_url.url)
                    self.log.warning("Found a sub-domain with suspicious top level %s shortened as %s matching indicators from \"%s\"",
                                     final_url.url, orig_url.url, ioc["name"])
                else:
                    self.log.warning("Found a sub-domain matching a known suspicious top level: %s", final_url.url)
                    self.log.warning("Found a sub-domain with a suspicious top level %s matching indicators from \"%s\"",
                                     final_url.url, ioc["name"])

                return True
                return ioc

        return False

    def check_domains(self, urls) -> bool:
    def check_domains(self, urls: list) -> dict:
        """Check a list of URLs against the provided list of domain indicators.

        :param urls: List of URLs to check against domain indicators
        :type urls: list
        :returns: True if any URL matched an indicator, otherwise False
        :rtype: bool
        :returns: Indicator details if matched, otherwise None

        """
        if not urls:
            return False
            return None

        for url in urls:
            if self.check_domain(url):
                return True
            check = self.check_domain(url)
            if check:
                return check

        return False

    def check_process(self, process) -> bool:
    def check_process(self, process: str) -> dict:
        """Check the provided process name against the list of process
        indicators.

        :param process: Process name to check against process indicators
        :type process: str
        :returns: True if process matched an indicator, otherwise False
        :rtype: bool
        :returns: Indicator details if matched, otherwise None

        """
        if not process:
            return False
            return None

        proc_name = os.path.basename(process)
        if proc_name in self.ioc_processes:
            self.log.warning("Found a known suspicious process name \"%s\"", process)
            return True
        for ioc in self.get_iocs("processes"):
            if proc_name == ioc["value"]:
                self.log.warning("Found a known suspicious process name \"%s\" matching indicators from \"%s\"",
                                 process, ioc["name"])
                return ioc

        if len(proc_name) == 16:
            for bad_proc in self.ioc_processes:
                if bad_proc.startswith(proc_name):
                    self.log.warning("Found a truncated known suspicious process name \"%s\"", process)
                    return True
            if len(proc_name) == 16:
                if ioc["value"].startswith(proc_name):
                    self.log.warning("Found a truncated known suspicious process name \"%s\" matching indicators from \"%s\"",
                                     process, ioc["name"])
                    return ioc

        return False

    def check_processes(self, processes) -> bool:
    def check_processes(self, processes: list) -> dict:
        """Check the provided list of processes against the list of
        process indicators.

        :param processes: List of processes to check against process indicators
        :type processes: list
        :returns: True if process matched an indicator, otherwise False
        :rtype: bool
        :returns: Indicator details if matched, otherwise None

        """
        if not processes:
            return False
            return None

        for process in processes:
            if self.check_process(process):
                return True
            check = self.check_process(process)
            if check:
                return check

        return False

    def check_email(self, email) -> bool:
    def check_email(self, email: str) -> dict:
        """Check the provided email against the list of email indicators.

        :param email: Email address to check against email indicators
        :type email: str
        :returns: True if email address matched an indicator, otherwise False
        :rtype: bool
        :returns: Indicator details if matched, otherwise None

        """
        if not email:
            return False
            return None

        if email.lower() in self.ioc_emails:
            self.log.warning("Found a known suspicious email address: \"%s\"", email)
            return True
        for ioc in self.get_iocs("emails"):
            if email.lower() == ioc["value"].lower():
                self.log.warning("Found a known suspicious email address \"%s\" matching indicators from \"%s\"",
                                 email, ioc["name"])
                return ioc

        return False
    def check_file_name(self, file_name: str) -> dict:
        """Check the provided file name against the list of file indicators.

    def check_file(self, file_path) -> bool:
        """Check the provided file path against the list of file indicators.
        :param file_name: File name to check against file
            indicators
        :type file_name: str
        :returns: Indicator details if matched, otherwise None

        """
        if not file_name:
            return None

        for ioc in self.get_iocs("file_names"):
            if ioc["value"] == file_name:
                self.log.warning("Found a known suspicious file name \"%s\" matching indicators from \"%s\"",
                                 file_name, ioc["name"])
                return ioc

    def check_file_path(self, file_path: str) -> dict:
        """Check the provided file path against the list of file indicators (both path and name).

        :param file_path: File path or file name to check against file
            indicators
        :type file_path: str
        :returns: True if the file path matched an indicator, otherwise False
        :rtype: bool
        :returns: Indicator details if matched, otherwise None

        """
        if not file_path:
            return False
            return None

        file_name = os.path.basename(file_path)
        if file_name in self.ioc_files:
            self.log.warning("Found a known suspicious file: \"%s\"", file_path)
            return True
        ioc = self.check_file_name(os.path.basename(file_path))
        if ioc:
            return ioc

        return False
        for ioc in self.get_iocs("file_paths"):
            # Strip any trailing slash from indicator paths to match directories.
            if file_path.startswith(ioc["value"].rstrip("/")):
                self.log.warning("Found a known suspicious file path \"%s\" matching indicators from \"%s\"",
                                 file_path, ioc["name"])
                return ioc

    def check_profile(self, profile_uuid: str) -> dict:
        """Check the provided configuration profile UUID against the list of indicators.

        :param profile_uuid: Profile UUID to check against configuration profile indicators
        :type profile_uuid: str
        :returns: Indicator details if matched, otherwise None

        """
        if not profile_uuid:
            return None

        for ioc in self.get_iocs("ios_profile_ids"):
            if profile_uuid in ioc["value"]:
                self.log.warning("Found a known suspicious profile ID \"%s\" matching indicators from \"%s\"",
                                 profile_uuid, ioc["name"])
                return ioc

    def check_file_hash(self, file_hash: str) -> dict:
        """Check the provided SHA256 file hash against the list of indicators.

        :param file_hash: SHA256 hash to check
        :type file_hash: str
        :returns: Indicator details if matched, otherwise None

        """
        if not file_hash:
            return None

        for ioc in self.get_iocs("files_sha256"):
            if file_hash.lower() == ioc["value"].lower():
                self.log.warning("Found a known suspicious file with hash \"%s\" matching indicators from \"%s\"",
                                 file_hash, ioc["name"])
                return ioc

    def check_app_id(self, app_id: str) -> dict:
        """Check the provided app identifier (typically an Android package name)
        against the list of indicators.

        :param app_id: App ID to check against the list of indicators
        :type app_id: str
        :returns: Indicator details if matched, otherwise None

        """
        if not app_id:
            return None

        for ioc in self.get_iocs("app_ids"):
            if app_id.lower() == ioc["value"].lower():
                self.log.warning("Found a known suspicious app with ID \"%s\" matching indicators from \"%s\"",
                                 app_id, ioc["name"])
                return ioc

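After this refactor each check_* helper returns the matched indicator record (or None) instead of a bool. A hedged usage sketch (editor's illustration, not part of this diff); the STIX2 path and app ID are placeholders.

# Editor's sketch: loading indicators and checking an artifact against them.
import logging

from mvt.common.indicators import Indicators

iocs = Indicators(log=logging.getLogger(__name__))
iocs.load_indicators_files(["indicators.stix2"], load_default=False)

match = iocs.check_app_id("com.example.suspicious")
if match:
    print("Matched collection:", match["name"], "from", match["stix2_file_name"])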
@@ -1,25 +1,59 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from rich import print

from .version import MVT_VERSION, check_for_updates
from .updates import IndicatorsUpdates, MVTUpdates
from .version import MVT_VERSION


def logo():
    print("\n")
    print("\t[bold]MVT[/bold] - Mobile Verification Toolkit")
    print("\t\thttps://mvt.re")
    print(f"\t\tVersion: {MVT_VERSION}")

def check_updates() -> None:
    # First we check for MVT version updates.
    mvt_updates = MVTUpdates()
    try:
        latest_version = check_for_updates()
    except:
        latest_version = mvt_updates.check()
    except Exception:
        pass
    else:
        if latest_version:
            print(f"\t\t[bold]Version {latest_version} is available! Upgrade mvt![/bold]")

    # Then we check for indicators files updates.
    ioc_updates = IndicatorsUpdates()

    # Before proceeding, we check if we have downloaded an indicators index.
    # If not, there's no point in proceeding with the updates check.
    if ioc_updates.get_latest_update() == 0:
        print("\t\t[bold]You have not yet downloaded any indicators, check the `download-iocs` command![/bold]")
        return

    # We only perform this check at a fixed frequency, in order to not
    # overburden the user with too many lookups if the command is being run
    # multiple times.
    should_check, hours = ioc_updates.should_check()
    if not should_check:
        print(f"\t\tIndicators updates checked recently, next automatic check in {int(hours)} hours")
        return

    try:
        ioc_to_update = ioc_updates.check()
    except Exception:
        pass
    else:
        if ioc_to_update:
            print("\t\t[bold]There are updates to your indicators files! Run the `download-iocs` command to update![/bold]")
        else:
            print("\t\tYour indicators files seem to be up to date.")


def logo() -> None:
    print("\n")
    print("\t[bold]MVT[/bold] - Mobile Verification Toolkit")
    print("\t\thttps://mvt.re")
    print(f"\t\tVersion: {MVT_VERSION}")

    check_updates()

    print("\n")
@@ -1,43 +1,46 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Copyright (c) 2021-2022 Claudio Guarnieri.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import csv
import io
import logging
import os
import re
from typing import Callable

import simplejson as json

from .indicators import Indicators


class DatabaseNotFoundError(Exception):
    pass


class DatabaseCorruptedError(Exception):
    pass


class InsufficientPrivileges(Exception):
    pass


class MVTModule(object):
    """This class provides a base for all extraction modules."""

    enabled = True
    slug = None

    def __init__(self, file_path=None, base_folder=None, output_folder=None,
                 fast_mode=False, log=None, results=[]):
    def __init__(self, file_path: str = None, target_path: str = None,
                 results_path: str = None, fast_mode: bool = False,
                 log: logging.Logger = None, results: list = None):
        """Initialize module.

        :param file_path: Path to the module's database file, if there is any
        :type file_path: str
        :param base_folder: Path to the base folder (backup or filesystem dump)
        :param target_path: Path to the target folder (backup or filesystem dump)
        :type file_path: str
        :param output_folder: Folder where results will be stored
        :type output_folder: str
        :param results_path: Folder where results will be stored
        :type results_path: str
        :param fast_mode: Flag to enable or disable slow modules
        :type fast_mode: bool
        :param log: Handle to logger
@@ -45,26 +48,26 @@ class MVTModule(object):
        :type results: list
        """
        self.file_path = file_path
        self.base_folder = base_folder
        self.output_folder = output_folder
        self.target_path = target_path
        self.results_path = results_path
        self.fast_mode = fast_mode
        self.log = log
        self.indicators = None
        self.results = results
        self.results = results if results else []
        self.detected = []
        self.timeline = []
        self.timeline_detected = []

    @classmethod
    def from_json(cls, json_path, log=None):
        with open(json_path, "r") as handle:
    def from_json(cls, json_path: str, log: logging.Logger = None):
        with open(json_path, "r", encoding="utf-8") as handle:
            results = json.load(handle)
            if log:
                log.info("Loaded %d results from \"%s\"",
                         len(results), json_path)
            return cls(results=results, log=log)

    def get_slug(self):
    def get_slug(self) -> str:
        """Use the module's class name to retrieve a slug"""
        if self.slug:
            return self.slug
@@ -72,7 +75,7 @@ class MVTModule(object):
        sub = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", self.__class__.__name__)
        return re.sub("([a-z0-9])([A-Z])", r"\1_\2", sub).lower()

    def check_indicators(self):
    def check_indicators(self) -> None:
        """Check the results of this module against a provided list of
        indicators.

@@ -80,17 +83,17 @@ class MVTModule(object):
        """
        raise NotImplementedError

    def save_to_json(self):
    def save_to_json(self) -> None:
        """Save the collected results to a json file."""
        if not self.output_folder:
        if not self.results_path:
            return

        name = self.get_slug()

        if self.results:
            results_file_name = f"{name}.json"
            results_json_path = os.path.join(self.output_folder, results_file_name)
            with io.open(results_json_path, "w", encoding="utf-8") as handle:
            results_json_path = os.path.join(self.results_path, results_file_name)
            with open(results_json_path, "w", encoding="utf-8") as handle:
|
||||
try:
|
||||
json.dump(self.results, handle, indent=4, default=str)
|
||||
except Exception as e:
|
||||
@@ -99,15 +102,15 @@ class MVTModule(object):
|
||||
|
||||
if self.detected:
|
||||
detected_file_name = f"{name}_detected.json"
|
||||
detected_json_path = os.path.join(self.output_folder, detected_file_name)
|
||||
with io.open(detected_json_path, "w", encoding="utf-8") as handle:
|
||||
detected_json_path = os.path.join(self.results_path, detected_file_name)
|
||||
with open(detected_json_path, "w", encoding="utf-8") as handle:
|
||||
json.dump(self.detected, handle, indent=4, default=str)
|
||||
|
||||
def serialize(self, record):
|
||||
def serialize(self, record: dict) -> None:
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def _deduplicate_timeline(timeline):
|
||||
def _deduplicate_timeline(timeline: list) -> list:
|
||||
"""Serialize entry as JSON to deduplicate repeated entries
|
||||
|
||||
:param timeline: List of entries from timeline to deduplicate
|
||||
@@ -118,7 +121,7 @@ class MVTModule(object):
|
||||
timeline_set.add(json.dumps(record, sort_keys=True))
|
||||
return [json.loads(record) for record in timeline_set]
|
||||
|
||||
def to_timeline(self):
|
||||
def to_timeline(self) -> None:
|
||||
"""Convert results into a timeline."""
|
||||
for result in self.results:
|
||||
record = self.serialize(result)
|
||||
@@ -140,12 +143,12 @@ class MVTModule(object):
|
||||
self.timeline = self._deduplicate_timeline(self.timeline)
|
||||
self.timeline_detected = self._deduplicate_timeline(self.timeline_detected)
|
||||
|
||||
def run(self):
|
||||
def run(self) -> None:
|
||||
"""Run the main module procedure."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def run_module(module):
|
||||
def run_module(module: Callable) -> None:
|
||||
module.log.info("Running module %s...", module.__class__.__name__)
|
||||
|
||||
try:
|
||||
@@ -184,15 +187,16 @@ def run_module(module):
|
||||
module.save_to_json()
|
||||
|
||||
|
||||
def save_timeline(timeline, timeline_path):
|
||||
def save_timeline(timeline: list, timeline_path: str) -> None:
|
||||
"""Save the timeline in a csv file.
|
||||
|
||||
:param timeline: List of records to order and store
|
||||
:param timeline_path: Path to the csv file to store the timeline to
|
||||
|
||||
"""
|
||||
with io.open(timeline_path, "a+", encoding="utf-8") as handle:
|
||||
csvoutput = csv.writer(handle, delimiter=",", quotechar="\"")
|
||||
with open(timeline_path, "a+", encoding="utf-8") as handle:
|
||||
csvoutput = csv.writer(handle, delimiter=",", quotechar="\"",
|
||||
quoting=csv.QUOTE_ALL)
|
||||
csvoutput.writerow(["UTC Timestamp", "Plugin", "Event", "Description"])
|
||||
for event in sorted(timeline, key=lambda x: x["timestamp"] if x["timestamp"] is not None else ""):
|
||||
csvoutput.writerow([
|
||||
|
||||
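To make the refactored base-class contract concrete, below is a minimal, hypothetical MVTModule subclass; the module name and record fields are invented, but the overridden methods (run, serialize, check_indicators) and the results/detected attributes are the ones defined by the class above.

# Illustrative sketch, not part of the diff: a minimal MVTModule subclass.
from mvt.common.module import MVTModule

class ExampleArtifact(MVTModule):

    def run(self) -> None:
        # A real module would parse a database or plist from target_path here.
        self.results = [{"timestamp": "2022-01-01 00:00:00.000000",
                         "path": "/private/var/example"}]

    def serialize(self, record: dict) -> dict:
        return {
            "timestamp": record["timestamp"],
            "module": self.__class__.__name__,
            "event": "example_entry",
            "data": record["path"],
        }

    def check_indicators(self) -> None:
        if not self.indicators:
            return
        for result in self.results:
            if self.indicators.check_file_path(result["path"]):
                self.detected.append(result)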
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
|
||||
mvt/common/updates.py (new file, 211 lines)
@@ -0,0 +1,211 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
import requests
|
||||
import yaml
|
||||
from packaging import version
|
||||
|
||||
from .indicators import MVT_DATA_FOLDER, MVT_INDICATORS_FOLDER
|
||||
from .version import MVT_VERSION
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# In hours.
|
||||
INDICATORS_CHECK_FREQUENCY = 12
|
||||
|
||||
|
||||
class MVTUpdates:
|
||||
|
||||
def check(self) -> str:
|
||||
res = requests.get("https://pypi.org/pypi/mvt/json")
|
||||
data = res.json()
|
||||
latest_version = data.get("info", {}).get("version", "")
|
||||
|
||||
if version.parse(latest_version) > version.parse(MVT_VERSION):
|
||||
return latest_version
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
class IndicatorsUpdates:
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.github_raw_url = "https://raw.githubusercontent.com/{}/{}/{}/{}"
|
||||
|
||||
self.index_owner = "mvt-project"
|
||||
self.index_repo = "mvt-indicators"
|
||||
self.index_branch = "main"
|
||||
self.index_path = "indicators.yaml"
|
||||
|
||||
self.latest_update_path = os.path.join(MVT_DATA_FOLDER,
|
||||
"latest_indicators_update")
|
||||
self.latest_check_path = os.path.join(MVT_DATA_FOLDER,
|
||||
"latest_indicators_check")
|
||||
|
||||
def get_latest_check(self) -> int:
|
||||
if not os.path.exists(self.latest_check_path):
|
||||
return 0
|
||||
|
||||
with open(self.latest_check_path, "r") as handle:
|
||||
data = handle.read().strip()
|
||||
if data:
|
||||
return int(data)
|
||||
|
||||
return 0
|
||||
|
||||
def set_latest_check(self) -> None:
|
||||
timestamp = int(datetime.utcnow().timestamp())
|
||||
with open(self.latest_check_path, "w") as handle:
|
||||
handle.write(str(timestamp))
|
||||
|
||||
def get_latest_update(self) -> int:
|
||||
if not os.path.exists(self.latest_update_path):
|
||||
return 0
|
||||
|
||||
with open(self.latest_update_path, "r") as handle:
|
||||
data = handle.read().strip()
|
||||
if data:
|
||||
return int(data)
|
||||
|
||||
return 0
|
||||
|
||||
def set_latest_update(self) -> None:
|
||||
timestamp = int(datetime.utcnow().timestamp())
|
||||
with open(self.latest_update_path, "w") as handle:
|
||||
handle.write(str(timestamp))
|
||||
|
||||
def get_remote_index(self) -> dict:
|
||||
url = self.github_raw_url.format(self.index_owner, self.index_repo,
|
||||
self.index_branch, self.index_path)
|
||||
res = requests.get(url)
|
||||
if res.status_code != 200:
|
||||
log.error("Failed to retrieve indicators index located at %s (error %d)",
|
||||
url, res.status_code)
|
||||
return None
|
||||
|
||||
return yaml.safe_load(res.content)
|
||||
|
||||
def download_remote_ioc(self, ioc_url: str) -> str:
|
||||
res = requests.get(ioc_url)
|
||||
if res.status_code != 200:
|
||||
log.error("Failed to download indicators file from %s (error %d)",
|
||||
ioc_url, res.status_code)
|
||||
return None
|
||||
|
||||
clean_file_name = ioc_url.lstrip("https://").replace("/", "_")
|
||||
ioc_path = os.path.join(MVT_INDICATORS_FOLDER, clean_file_name)
|
||||
|
||||
with open(ioc_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(res.text)
|
||||
|
||||
return ioc_path
|
||||
|
||||
def update(self) -> None:
|
||||
self.set_latest_check()
|
||||
|
||||
if not os.path.exists(MVT_INDICATORS_FOLDER):
|
||||
os.makedirs(MVT_INDICATORS_FOLDER)
|
||||
|
||||
index = self.get_remote_index()
|
||||
for ioc in index.get("indicators", []):
|
||||
ioc_type = ioc.get("type", "")
|
||||
|
||||
if ioc_type == "github":
|
||||
github = ioc.get("github", {})
|
||||
owner = github.get("owner", "")
|
||||
repo = github.get("repo", "")
|
||||
branch = github.get("branch", "main")
|
||||
path = github.get("path", "")
|
||||
|
||||
ioc_url = self.github_raw_url.format(owner, repo, branch, path)
|
||||
else:
|
||||
ioc_url = ioc.get("download_url", "")
|
||||
|
||||
if not ioc_url:
|
||||
log.error("Could not find a way to download indicator file for %s",
|
||||
ioc.get("name"))
|
||||
continue
|
||||
|
||||
ioc_local_path = self.download_remote_ioc(ioc_url)
|
||||
if not ioc_local_path:
|
||||
continue
|
||||
|
||||
log.info("Downloaded indicators \"%s\" to %s",
|
||||
ioc.get("name"), ioc_local_path)
|
||||
|
||||
self.set_latest_update()
|
||||
|
||||
def _get_remote_file_latest_commit(self, owner: str, repo: str,
|
||||
branch: str, path: str) -> bool:
|
||||
file_commit_url = f"https://api.github.com/repos/{self.index_owner}/{self.index_repo}/commits?path={self.index_path}"
|
||||
res = requests.get(file_commit_url)
|
||||
if res.status_code != 200:
|
||||
log.error("Failed to get details about file %s (error %d)",
|
||||
file_commit_url, res.status_code)
|
||||
return False
|
||||
|
||||
details = res.json()
|
||||
if len(details) == 0:
|
||||
return False
|
||||
|
||||
latest_commit = details[0]
|
||||
latest_commit_date = latest_commit.get("commit", {}).get("author", {}).get("date", None)
|
||||
if not latest_commit_date:
|
||||
log.error("Failed to retrieve date of latest update to indicators index file")
|
||||
return False
|
||||
|
||||
latest_commit_dt = datetime.strptime(latest_commit_date, '%Y-%m-%dT%H:%M:%SZ')
|
||||
latest_commit_ts = int(latest_commit_dt.timestamp())
|
||||
|
||||
return latest_commit_ts
|
||||
|
||||
def should_check(self) -> (bool, int):
|
||||
now = datetime.utcnow()
|
||||
latest_check_ts = self.get_latest_check()
|
||||
latest_check_dt = datetime.fromtimestamp(latest_check_ts)
|
||||
|
||||
diff = now - latest_check_dt
|
||||
diff_hours = divmod(diff.total_seconds(), 3600)[0]
|
||||
|
||||
if diff_hours >= INDICATORS_CHECK_FREQUENCY:
|
||||
return True, 0
|
||||
|
||||
return False, INDICATORS_CHECK_FREQUENCY - diff_hours
|
||||
|
||||
def check(self) -> bool:
|
||||
self.set_latest_check()
|
||||
|
||||
latest_update = self.get_latest_update()
|
||||
latest_commit_ts = self._get_remote_file_latest_commit(self.index_owner,
|
||||
self.index_repo,
|
||||
self.index_branch,
|
||||
self.index_path)
|
||||
|
||||
if latest_update < latest_commit_ts:
|
||||
return True
|
||||
|
||||
index = self.get_remote_index()
|
||||
for ioc in index.get("indicators", []):
|
||||
if ioc.get("type", "") != "github":
|
||||
continue
|
||||
|
||||
github = ioc.get("github", {})
|
||||
owner = github.get("owner", "")
|
||||
repo = github.get("repo", "")
|
||||
branch = github.get("branch", "main")
|
||||
path = github.get("path", "")
|
||||
|
||||
file_latest_commit_ts = self._get_remote_file_latest_commit(owner,
|
||||
repo,
|
||||
branch,
|
||||
path)
|
||||
if latest_update < file_latest_commit_ts:
|
||||
return True
|
||||
|
||||
return False
|
||||
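A short usage sketch of the two helpers introduced in this new file, mirroring how check_updates() in mvt/common/logo.py wires them together; return values are as defined above.

# Illustrative sketch, not part of the diff: using MVTUpdates and
# IndicatorsUpdates the way the CLI banner code does.
from mvt.common.updates import IndicatorsUpdates, MVTUpdates

mvt_updates = MVTUpdates()
try:
    latest = mvt_updates.check()          # "" when MVT is already up to date
except Exception:
    latest = ""
if latest:
    print(f"MVT {latest} is available")

ioc_updates = IndicatorsUpdates()
should_check, hours = ioc_updates.should_check()   # rate-limited to every 12 hours
if should_check and ioc_updates.check():
    ioc_updates.update()                  # downloads the files listed in the remote index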
@@ -1,9 +1,10 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import requests
|
||||
from typing import Optional
|
||||
from tld import get_tld
|
||||
|
||||
SHORTENER_DOMAINS = [
|
||||
@@ -250,9 +251,10 @@ SHORTENER_DOMAINS = [
|
||||
"zz.gd",
|
||||
]
|
||||
|
||||
|
||||
class URL:
|
||||
|
||||
def __init__(self, url):
|
||||
def __init__(self, url: str) -> None:
|
||||
if type(url) == bytes:
|
||||
url = url.decode()
|
||||
|
||||
@@ -261,7 +263,7 @@ class URL:
|
||||
self.top_level = self.get_top_level()
|
||||
self.is_shortened = False
|
||||
|
||||
def get_domain(self):
|
||||
def get_domain(self) -> None:
|
||||
"""Get the domain from a URL.
|
||||
|
||||
:param url: URL to parse
|
||||
@@ -272,11 +274,13 @@ class URL:
|
||||
"""
|
||||
# TODO: Properly handle exception.
|
||||
try:
|
||||
return get_tld(self.url, as_object=True, fix_protocol=True).parsed_url.netloc.lower().lstrip("www.")
|
||||
except:
|
||||
return get_tld(self.url,
|
||||
as_object=True,
|
||||
fix_protocol=True).parsed_url.netloc.lower().lstrip("www.")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def get_top_level(self):
|
||||
def get_top_level(self) -> None:
|
||||
"""Get only the top-level domain from a URL.
|
||||
|
||||
:param url: URL to parse
|
||||
@@ -288,7 +292,7 @@ class URL:
|
||||
# TODO: Properly handle exception.
|
||||
try:
|
||||
return get_tld(self.url, as_object=True, fix_protocol=True).fld.lower()
|
||||
except:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def check_if_shortened(self) -> bool:
|
||||
@@ -305,7 +309,7 @@ class URL:
|
||||
|
||||
return self.is_shortened
|
||||
|
||||
def unshorten(self):
|
||||
def unshorten(self) -> Optional[str]:
|
||||
"""Unshorten the URL by requesting an HTTP HEAD response."""
|
||||
res = requests.head(self.url)
|
||||
if str(res.status_code).startswith("30"):
|
||||
|
||||
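A small usage sketch of the URL helper as typed above; the example link is hypothetical and the import path (mvt.common.url) is assumed from the repository layout.

# Illustrative sketch, not part of the diff.
from mvt.common.url import URL

url = URL("https://bit.ly/example")      # hypothetical shortened link
domain = url.get_domain()                # e.g. "bit.ly", or None on parsing errors
if url.check_if_shortened():             # matches the domain against SHORTENER_DOMAINS
    final_url = url.unshorten()          # follows the HTTP HEAD redirect; may return None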
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -8,7 +8,7 @@ import hashlib
|
||||
import re
|
||||
|
||||
|
||||
def convert_mactime_to_unix(timestamp, from_2001=True):
|
||||
def convert_mactime_to_unix(timestamp, from_2001: bool = True):
|
||||
"""Converts Mac Standard Time to a Unix timestamp.
|
||||
|
||||
:param timestamp: MacTime timestamp (either int or float).
|
||||
@@ -37,7 +37,7 @@ def convert_mactime_to_unix(timestamp, from_2001=True):
|
||||
return None
|
||||
|
||||
|
||||
def convert_chrometime_to_unix(timestamp):
|
||||
def convert_chrometime_to_unix(timestamp: int) -> int:
|
||||
"""Converts Chrome timestamp to a Unix timestamp.
|
||||
|
||||
:param timestamp: Chrome timestamp as int.
|
||||
@@ -45,12 +45,12 @@ def convert_chrometime_to_unix(timestamp):
|
||||
:returns: Unix epoch timestamp.
|
||||
|
||||
"""
|
||||
epoch_start = datetime.datetime(1601, 1 , 1)
|
||||
epoch_start = datetime.datetime(1601, 1, 1)
|
||||
delta = datetime.timedelta(microseconds=timestamp)
|
||||
return epoch_start + delta
|
||||
|
||||
|
||||
def convert_timestamp_to_iso(timestamp):
|
||||
def convert_timestamp_to_iso(timestamp: str) -> str:
|
||||
"""Converts Unix timestamp to ISO string.
|
||||
|
||||
:param timestamp: Unix timestamp.
|
||||
@@ -64,7 +64,8 @@ def convert_timestamp_to_iso(timestamp):
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
def check_for_links(text):
|
||||
|
||||
def check_for_links(text: str) -> list:
|
||||
"""Checks if a given text contains HTTP links.
|
||||
|
||||
:param text: Any provided text.
|
||||
@@ -72,9 +73,10 @@ def check_for_links(text):
|
||||
:returns: Search results.
|
||||
|
||||
"""
|
||||
return re.findall("(?P<url>https?://[^\s]+)", text, re.IGNORECASE)
|
||||
return re.findall(r"(?P<url>https?://[^\s]+)", text, re.IGNORECASE)
|
||||
|
||||
def get_sha256_from_file_path(file_path):
|
||||
|
||||
def get_sha256_from_file_path(file_path: str) -> str:
|
||||
"""Calculate the SHA256 hash of a file from a file path.
|
||||
|
||||
:param file_path: Path to the file to hash
|
||||
@@ -88,9 +90,10 @@ def get_sha256_from_file_path(file_path):
|
||||
|
||||
return sha256_hash.hexdigest()
|
||||
|
||||
|
||||
# Note: taken from here:
|
||||
# https://stackoverflow.com/questions/57014259/json-dumps-on-dictionary-with-bytes-for-keys
|
||||
def keys_bytes_to_string(obj):
|
||||
def keys_bytes_to_string(obj) -> str:
|
||||
"""Convert object keys from bytes to string.
|
||||
|
||||
:param obj: Object to convert from bytes to string.
|
||||
|
||||
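A brief sketch exercising the helpers touched above, assuming they behave as their docstrings describe; the timestamp value is an arbitrary example.

# Illustrative sketch, not part of the diff.
from mvt.common.utils import (check_for_links, convert_chrometime_to_unix,
                              convert_timestamp_to_iso)

# Chrome timestamps are microseconds since 1601-01-01 UTC.
dt = convert_chrometime_to_unix(13285932061000000)   # arbitrary example value
iso = convert_timestamp_to_iso(dt)

# Extract HTTP(S) links from free text.
links = check_for_links("visit https://example.com/page now")
# -> ["https://example.com/page"]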
@@ -1,19 +1,6 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import requests
|
||||
from packaging import version
|
||||
|
||||
MVT_VERSION = "1.2.14"
|
||||
|
||||
def check_for_updates():
|
||||
res = requests.get("https://pypi.org/pypi/mvt/json")
|
||||
data = res.json()
|
||||
latest_version = data.get("info", {}).get("version", "")
|
||||
|
||||
if version.parse(latest_version) > version.parse(MVT_VERSION):
|
||||
return latest_version
|
||||
|
||||
return None
|
||||
MVT_VERSION = "2.1.1"
|
||||
|
||||
mvt/common/virustotal.py (new file, 45 lines)
@@ -0,0 +1,45 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import requests
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
MVT_VT_API_KEY = "MVT_VT_API_KEY"
|
||||
|
||||
|
||||
class VTNoKey(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class VTQuotaExceeded(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def virustotal_lookup(file_hash: str):
|
||||
if MVT_VT_API_KEY not in os.environ:
|
||||
raise VTNoKey("No VirusTotal API key provided: to use VirusTotal lookups please provide your API key with `export MVT_VT_API_KEY=<key>`")
|
||||
|
||||
headers = {
|
||||
"User-Agent": "VirusTotal",
|
||||
"Content-Type": "application/json",
|
||||
"x-apikey": os.environ[MVT_VT_API_KEY],
|
||||
}
|
||||
res = requests.get(f"https://www.virustotal.com/api/v3/files/{file_hash}", headers=headers)
|
||||
|
||||
if res.status_code == 200:
|
||||
report = res.json()
|
||||
return report["data"]
|
||||
elif res.status_code == 404:
|
||||
log.info("Could not find results for file with hash %s", file_hash)
|
||||
elif res.status_code == 429:
|
||||
raise VTQuotaExceeded("You have exceeded the quota for your VirusTotal API key")
|
||||
else:
|
||||
raise Exception("Unexpected response from VirusTotal: %s", res.status_code)
|
||||
|
||||
return None
|
||||
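A usage sketch for the new virustotal_lookup() helper; the hash is the well-known EICAR test file MD5 and the response fields follow the VirusTotal v3 file object, both used here only for illustration.

# Illustrative sketch, not part of the diff.
import os

from mvt.common.virustotal import VTNoKey, VTQuotaExceeded, virustotal_lookup

os.environ["MVT_VT_API_KEY"] = "<your API key>"      # normally exported in the shell
try:
    report = virustotal_lookup("44d88612fea8a8f36de82e1278abb02f")
    if report:
        print(report.get("attributes", {}).get("last_analysis_stats"))
except VTNoKey as exc:
    print(exc)
except VTQuotaExceeded as exc:
    print(exc)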
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
|
||||
mvt/ios/cli.py (206 lines)
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -10,12 +10,16 @@ import click
|
||||
from rich.logging import RichHandler
|
||||
from rich.prompt import Prompt
|
||||
|
||||
from mvt.common.help import *
|
||||
from mvt.common.indicators import Indicators, IndicatorsFileBadFormat
|
||||
from mvt.common.cmd_check_iocs import CmdCheckIOCS
|
||||
from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_IOC,
|
||||
HELP_MSG_LIST_MODULES, HELP_MSG_MODULE,
|
||||
HELP_MSG_OUTPUT)
|
||||
from mvt.common.logo import logo
|
||||
from mvt.common.module import run_module, save_timeline
|
||||
from mvt.common.options import MutuallyExclusiveOption
|
||||
from mvt.common.updates import IndicatorsUpdates
|
||||
|
||||
from .cmd_check_backup import CmdIOSCheckBackup
|
||||
from .cmd_check_fs import CmdIOSCheckFS
|
||||
from .decrypt import DecryptBackup
|
||||
from .modules.backup import BACKUP_MODULES
|
||||
from .modules.fs import FS_MODULES
|
||||
@@ -28,7 +32,8 @@ logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Set this environment variable to a password if needed.
|
||||
PASSWD_ENV = "MVT_IOS_BACKUP_PASSWORD"
|
||||
MVT_IOS_BACKUP_PASSWORD = "MVT_IOS_BACKUP_PASSWORD"
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Main
|
||||
@@ -53,7 +58,7 @@ def version():
|
||||
@click.option("--destination", "-d", required=True,
|
||||
help="Path to the folder where to store the decrypted backup")
|
||||
@click.option("--password", "-p", cls=MutuallyExclusiveOption,
|
||||
help=f"Password to use to decrypt the backup (or, set {PASSWD_ENV} environment variable)",
|
||||
help=f"Password to use to decrypt the backup (or, set {MVT_IOS_BACKUP_PASSWORD} environment variable)",
|
||||
mutually_exclusive=["key_file"])
|
||||
@click.option("--key-file", "-k", cls=MutuallyExclusiveOption,
|
||||
type=click.Path(exists=True),
|
||||
@@ -65,22 +70,22 @@ def decrypt_backup(ctx, destination, password, key_file, backup_path):
|
||||
backup = DecryptBackup(backup_path, destination)
|
||||
|
||||
if key_file:
|
||||
if PASSWD_ENV in os.environ:
|
||||
if MVT_IOS_BACKUP_PASSWORD in os.environ:
|
||||
log.info("Ignoring environment variable, using --key-file '%s' instead",
|
||||
PASSWD_ENV, key_file)
|
||||
MVT_IOS_BACKUP_PASSWORD, key_file)
|
||||
|
||||
backup.decrypt_with_key_file(key_file)
|
||||
elif password:
|
||||
log.info("Your password may be visible in the process table because it was supplied on the command line!")
|
||||
|
||||
if PASSWD_ENV in os.environ:
|
||||
if MVT_IOS_BACKUP_PASSWORD in os.environ:
|
||||
log.info("Ignoring %s environment variable, using --password argument instead",
|
||||
PASSWD_ENV)
|
||||
MVT_IOS_BACKUP_PASSWORD)
|
||||
|
||||
backup.decrypt_with_password(password)
|
||||
elif PASSWD_ENV in os.environ:
|
||||
log.info("Using password from %s environment variable", PASSWD_ENV)
|
||||
backup.decrypt_with_password(os.environ[PASSWD_ENV])
|
||||
elif MVT_IOS_BACKUP_PASSWORD in os.environ:
|
||||
log.info("Using password from %s environment variable", MVT_IOS_BACKUP_PASSWORD)
|
||||
backup.decrypt_with_password(os.environ[MVT_IOS_BACKUP_PASSWORD])
|
||||
else:
|
||||
sekrit = Prompt.ask("Enter backup password", password=True)
|
||||
backup.decrypt_with_password(sekrit)
|
||||
@@ -96,24 +101,24 @@ def decrypt_backup(ctx, destination, password, key_file, backup_path):
|
||||
#==============================================================================
|
||||
@cli.command("extract-key", help="Extract decryption key from an iTunes backup")
|
||||
@click.option("--password", "-p",
|
||||
help=f"Password to use to decrypt the backup (or, set {PASSWD_ENV} environment variable)")
|
||||
help=f"Password to use to decrypt the backup (or, set {MVT_IOS_BACKUP_PASSWORD} environment variable)")
|
||||
@click.option("--key-file", "-k",
|
||||
help="Key file to be written (if unset, will print to STDOUT)",
|
||||
required=False,
|
||||
type=click.Path(exists=False, file_okay=True, dir_okay=False, writable=True))
|
||||
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
|
||||
def extract_key(password, backup_path, key_file):
|
||||
def extract_key(password, key_file, backup_path):
|
||||
backup = DecryptBackup(backup_path)
|
||||
|
||||
if password:
|
||||
log.info("Your password may be visible in the process table because it was supplied on the command line!")
|
||||
|
||||
if PASSWD_ENV in os.environ:
|
||||
if MVT_IOS_BACKUP_PASSWORD in os.environ:
|
||||
log.info("Ignoring %s environment variable, using --password argument instead",
|
||||
PASSWD_ENV)
|
||||
elif PASSWD_ENV in os.environ:
|
||||
log.info("Using password from %s environment variable", PASSWD_ENV)
|
||||
password = os.environ[PASSWD_ENV]
|
||||
MVT_IOS_BACKUP_PASSWORD)
|
||||
elif MVT_IOS_BACKUP_PASSWORD in os.environ:
|
||||
log.info("Using password from %s environment variable", MVT_IOS_BACKUP_PASSWORD)
|
||||
password = os.environ[MVT_IOS_BACKUP_PASSWORD]
|
||||
else:
|
||||
password = Prompt.ask("Enter backup password", password=True)
|
||||
|
||||
@@ -136,55 +141,21 @@ def extract_key(password, backup_path, key_file):
|
||||
@click.option("--module", "-m", help=HELP_MSG_MODULE)
|
||||
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def check_backup(ctx, iocs, output, fast, backup_path, list_modules, module):
|
||||
if list_modules:
|
||||
log.info("Following is the list of available check-backup modules:")
|
||||
for backup_module in BACKUP_MODULES + MIXED_MODULES:
|
||||
log.info(" - %s", backup_module.__name__)
|
||||
def check_backup(ctx, iocs, output, fast, list_modules, module, backup_path):
|
||||
cmd = CmdIOSCheckBackup(target_path=backup_path, results_path=output,
|
||||
ioc_files=iocs, module_name=module, fast_mode=fast)
|
||||
|
||||
if list_modules:
|
||||
cmd.list_modules()
|
||||
return
|
||||
|
||||
log.info("Checking iTunes backup located at: %s", backup_path)
|
||||
|
||||
if output and not os.path.exists(output):
|
||||
try:
|
||||
os.makedirs(output)
|
||||
except Exception as e:
|
||||
log.critical("Unable to create output folder %s: %s", output, e)
|
||||
ctx.exit(1)
|
||||
cmd.run()
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
for ioc_path in iocs:
|
||||
try:
|
||||
indicators.parse_stix2(ioc_path)
|
||||
except IndicatorsFileBadFormat as e:
|
||||
log.critical(e)
|
||||
ctx.exit(1)
|
||||
log.info("Loaded a total of %d indicators", indicators.ioc_count)
|
||||
|
||||
timeline = []
|
||||
timeline_detected = []
|
||||
for backup_module in BACKUP_MODULES + MIXED_MODULES:
|
||||
if module and backup_module.__name__ != module:
|
||||
continue
|
||||
|
||||
m = backup_module(base_folder=backup_path, output_folder=output, fast_mode=fast,
|
||||
log=logging.getLogger(backup_module.__module__))
|
||||
m.is_backup = True
|
||||
|
||||
if iocs:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
|
||||
run_module(m)
|
||||
timeline.extend(m.timeline)
|
||||
timeline_detected.extend(m.timeline_detected)
|
||||
|
||||
if output:
|
||||
if len(timeline) > 0:
|
||||
save_timeline(timeline, os.path.join(output, "timeline.csv"))
|
||||
if len(timeline_detected) > 0:
|
||||
save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))
|
||||
if len(cmd.timeline_detected) > 0:
|
||||
log.warning("The analysis of the backup produced %d detections!",
|
||||
len(cmd.timeline_detected))
|
||||
|
||||
|
||||
#==============================================================================
|
||||
@@ -199,56 +170,21 @@ def check_backup(ctx, iocs, output, fast, backup_path, list_modules, module):
|
||||
@click.option("--module", "-m", help=HELP_MSG_MODULE)
|
||||
@click.argument("DUMP_PATH", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def check_fs(ctx, iocs, output, fast, dump_path, list_modules, module):
|
||||
if list_modules:
|
||||
log.info("Following is the list of available check-fs modules:")
|
||||
for fs_module in FS_MODULES + MIXED_MODULES:
|
||||
log.info(" - %s", fs_module.__name__)
|
||||
def check_fs(ctx, iocs, output, fast, list_modules, module, dump_path):
|
||||
cmd = CmdIOSCheckFS(target_path=dump_path, results_path=output,
|
||||
ioc_files=iocs, module_name=module, fast_mode=fast)
|
||||
|
||||
if list_modules:
|
||||
cmd.list_modules()
|
||||
return
|
||||
|
||||
log.info("Checking filesystem dump located at: %s", dump_path)
|
||||
log.info("Checking iOS filesystem located at: %s", dump_path)
|
||||
|
||||
if output and not os.path.exists(output):
|
||||
try:
|
||||
os.makedirs(output)
|
||||
except Exception as e:
|
||||
log.critical("Unable to create output folder %s: %s", output, e)
|
||||
ctx.exit(1)
|
||||
cmd.run()
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
for ioc_path in iocs:
|
||||
try:
|
||||
indicators.parse_stix2(ioc_path)
|
||||
except IndicatorsFileBadFormat as e:
|
||||
log.critical(e)
|
||||
ctx.exit(1)
|
||||
log.info("Loaded a total of %d indicators", indicators.ioc_count)
|
||||
|
||||
timeline = []
|
||||
timeline_detected = []
|
||||
for fs_module in FS_MODULES + MIXED_MODULES:
|
||||
if module and fs_module.__name__ != module:
|
||||
continue
|
||||
|
||||
m = fs_module(base_folder=dump_path, output_folder=output, fast_mode=fast,
|
||||
log=logging.getLogger(fs_module.__module__))
|
||||
|
||||
m.is_fs_dump = True
|
||||
|
||||
if iocs:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
|
||||
run_module(m)
|
||||
timeline.extend(m.timeline)
|
||||
timeline_detected.extend(m.timeline_detected)
|
||||
|
||||
if output:
|
||||
if len(timeline) > 0:
|
||||
save_timeline(timeline, os.path.join(output, "timeline.csv"))
|
||||
if len(timeline_detected) > 0:
|
||||
save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))
|
||||
if len(cmd.timeline_detected) > 0:
|
||||
log.warning("The analysis of the iOS filesystem produced %d detections!",
|
||||
len(cmd.timeline_detected))
|
||||
|
||||
|
||||
#==============================================================================
|
||||
@@ -256,56 +192,26 @@ def check_fs(ctx, iocs, output, fast, dump_path, list_modules, module):
|
||||
#==============================================================================
|
||||
@cli.command("check-iocs", help="Compare stored JSON results to provided indicators")
|
||||
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
|
||||
default=[], required=True, help=HELP_MSG_IOC)
|
||||
default=[], help=HELP_MSG_IOC)
|
||||
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
|
||||
@click.option("--module", "-m", help=HELP_MSG_MODULE)
|
||||
@click.argument("FOLDER", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def check_iocs(ctx, iocs, list_modules, module, folder):
|
||||
all_modules = []
|
||||
for entry in BACKUP_MODULES + FS_MODULES:
|
||||
if entry not in all_modules:
|
||||
all_modules.append(entry)
|
||||
cmd = CmdCheckIOCS(target_path=folder, ioc_files=iocs, module_name=module)
|
||||
cmd.modules = BACKUP_MODULES + FS_MODULES + MIXED_MODULES
|
||||
|
||||
if list_modules:
|
||||
log.info("Following is the list of available check-iocs modules:")
|
||||
for iocs_module in all_modules:
|
||||
log.info(" - %s", iocs_module.__name__)
|
||||
|
||||
cmd.list_modules()
|
||||
return
|
||||
|
||||
log.info("Checking stored results against provided indicators...")
|
||||
cmd.run()
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
for ioc_path in iocs:
|
||||
try:
|
||||
indicators.parse_stix2(ioc_path)
|
||||
except IndicatorsFileBadFormat as e:
|
||||
log.critical(e)
|
||||
ctx.exit(1)
|
||||
log.info("Loaded a total of %d indicators", indicators.ioc_count)
|
||||
|
||||
for file_name in os.listdir(folder):
|
||||
name_only, ext = os.path.splitext(file_name)
|
||||
file_path = os.path.join(folder, file_name)
|
||||
|
||||
for iocs_module in all_modules:
|
||||
if module and iocs_module.__name__ != module:
|
||||
continue
|
||||
|
||||
if iocs_module().get_slug() != name_only:
|
||||
continue
|
||||
|
||||
log.info("Loading results from \"%s\" with module %s", file_name,
|
||||
iocs_module.__name__)
|
||||
|
||||
m = iocs_module.from_json(file_path,
|
||||
log=logging.getLogger(iocs_module.__module__))
|
||||
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
|
||||
try:
|
||||
m.check_indicators()
|
||||
except NotImplementedError:
|
||||
continue
|
||||
#==============================================================================
|
||||
# Command: download-iocs
|
||||
#==============================================================================
|
||||
@cli.command("download-iocs", help="Download public STIX2 indicators")
|
||||
def download_iocs():
|
||||
ioc_updates = IndicatorsUpdates()
|
||||
ioc_updates.update()
|
||||
|
||||
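The decrypt-backup and extract-key commands above resolve the password in the same order; the helper below does not exist in MVT and is only a hedged summary of that precedence (command-line flag, then the MVT_IOS_BACKUP_PASSWORD environment variable, then an interactive prompt).

# Illustrative sketch, not part of the diff: hypothetical helper summarizing
# the password precedence implemented inline in the commands above.
import os

from rich.prompt import Prompt

def resolve_backup_password(cli_password: str = None) -> str:
    if cli_password:
        # Supplied on the command line; may be visible in the process table.
        return cli_password
    env_password = os.environ.get("MVT_IOS_BACKUP_PASSWORD")
    if env_password:
        return env_password
    return Prompt.ask("Enter backup password", password=True)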
mvt/ios/cmd_check_backup.py (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.common.command import Command
|
||||
|
||||
from .modules.backup import BACKUP_MODULES
|
||||
from .modules.mixed import MIXED_MODULES
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CmdIOSCheckBackup(Command):
|
||||
|
||||
name = "check-backup"
|
||||
modules = BACKUP_MODULES + MIXED_MODULES
|
||||
|
||||
def __init__(self, target_path: str = None, results_path: str = None,
|
||||
ioc_files: list = [], module_name: str = None, serial: str = None,
|
||||
fast_mode: bool = False):
|
||||
super().__init__(target_path=target_path, results_path=results_path,
|
||||
ioc_files=ioc_files, module_name=module_name,
|
||||
serial=serial, fast_mode=fast_mode, log=log)
|
||||
|
||||
def module_init(self, module):
|
||||
module.is_backup = True
|
||||
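The new command class can also be driven programmatically, which is exactly what the rewritten check_backup() in mvt/ios/cli.py does; the paths and IOC file name below are hypothetical.

# Illustrative sketch, not part of the diff.
from mvt.ios.cmd_check_backup import CmdIOSCheckBackup

cmd = CmdIOSCheckBackup(target_path="/path/to/backup",       # hypothetical paths
                        results_path="/path/to/output",
                        ioc_files=["indicators.stix2"],
                        fast_mode=False)
cmd.run()
if len(cmd.timeline_detected) > 0:
    print(f"The analysis produced {len(cmd.timeline_detected)} detections")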
mvt/ios/cmd_check_fs.py (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.common.command import Command
|
||||
|
||||
from .modules.fs import FS_MODULES
|
||||
from .modules.mixed import MIXED_MODULES
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CmdIOSCheckFS(Command):
|
||||
|
||||
name = "check-fs"
|
||||
modules = FS_MODULES + MIXED_MODULES
|
||||
|
||||
def __init__(self, target_path: str = None, results_path: str = None,
|
||||
ioc_files: list = [], module_name: str = None, serial: str = None,
|
||||
fast_mode: bool = False):
|
||||
super().__init__(target_path=target_path, results_path=results_path,
|
||||
ioc_files=ioc_files, module_name=module_name,
|
||||
serial=serial, fast_mode=fast_mode, log=log)
|
||||
|
||||
def module_init(self, module):
|
||||
module.is_fs_dump = True
|
||||
@@ -1,12 +1,14 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import binascii
|
||||
import glob
|
||||
import logging
|
||||
import multiprocessing
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import sqlite3
|
||||
|
||||
@@ -14,6 +16,7 @@ from iOSbackup import iOSbackup
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DecryptBackup:
|
||||
"""This class provides functions to decrypt an encrypted iTunes backup
|
||||
using either a password or a key file.
|
||||
@@ -21,12 +24,12 @@ class DecryptBackup:
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, backup_path, dest_path=None):
|
||||
def __init__(self, backup_path: str, dest_path: str = None) -> None:
|
||||
"""Decrypts an encrypted iOS backup.
|
||||
:param backup_path: Path to the encrypted backup folder
|
||||
:param dest_path: Path to the folder where to store the decrypted backup
|
||||
"""
|
||||
self.backup_path = backup_path
|
||||
self.backup_path = os.path.abspath(backup_path)
|
||||
self.dest_path = dest_path
|
||||
self._backup = None
|
||||
self._decryption_key = None
|
||||
@@ -35,7 +38,7 @@ class DecryptBackup:
|
||||
return self._backup is not None
|
||||
|
||||
@staticmethod
|
||||
def is_encrypted(backup_path) -> bool:
|
||||
def is_encrypted(backup_path: str) -> bool:
|
||||
"""Query Manifest.db file to see if it's encrypted or not.
|
||||
|
||||
:param backup_path: Path to the backup to decrypt
|
||||
@@ -51,7 +54,14 @@ class DecryptBackup:
|
||||
log.critical("The backup does not seem encrypted!")
|
||||
return False
|
||||
|
||||
def process_backup(self):
|
||||
def _process_file(self, relative_path: str, domain: str, item,
|
||||
file_id: str, item_folder: str) -> None:
|
||||
self._backup.getFileDecryptedCopy(manifestEntry=item,
|
||||
targetName=file_id,
|
||||
targetFolder=item_folder)
|
||||
log.info("Decrypted file %s [%s] to %s/%s", relative_path, domain, item_folder, file_id)
|
||||
|
||||
def process_backup(self) -> None:
|
||||
if not os.path.exists(self.dest_path):
|
||||
os.makedirs(self.dest_path)
|
||||
|
||||
@@ -61,6 +71,8 @@ class DecryptBackup:
|
||||
# We store it to the destination folder.
|
||||
shutil.copy(self._backup.manifestDB, manifest_path)
|
||||
|
||||
pool = multiprocessing.Pool(multiprocessing.cpu_count())
|
||||
|
||||
for item in self._backup.getBackupFilesList():
|
||||
try:
|
||||
file_id = item["backupFile"]
|
||||
@@ -83,13 +95,16 @@ class DecryptBackup:
|
||||
# Add manifest plist to both keys to handle this.
|
||||
item["manifest"] = item["file"]
|
||||
|
||||
self._backup.getFileDecryptedCopy(manifestEntry=item,
|
||||
targetName=file_id,
|
||||
targetFolder=item_folder)
|
||||
log.info("Decrypted file %s [%s] to %s/%s", relative_path, domain, item_folder, file_id)
|
||||
pool.apply_async(self._process_file, args=(relative_path,
|
||||
domain, item,
|
||||
file_id,
|
||||
item_folder))
|
||||
except Exception as e:
|
||||
log.error("Failed to decrypt file %s: %s", relative_path, e)
|
||||
|
||||
pool.close()
|
||||
pool.join()
|
||||
|
||||
# Copying over the root plist files as well.
|
||||
for file_name in os.listdir(self.backup_path):
|
||||
if file_name.endswith(".plist"):
|
||||
@@ -97,7 +112,7 @@ class DecryptBackup:
|
||||
shutil.copy(os.path.join(self.backup_path, file_name),
|
||||
self.dest_path)
|
||||
|
||||
def decrypt_with_password(self, password):
|
||||
def decrypt_with_password(self, password: str) -> None:
|
||||
"""Decrypts an encrypted iOS backup.
|
||||
|
||||
:param password: Password to use to decrypt the original backup
|
||||
@@ -135,7 +150,7 @@ class DecryptBackup:
|
||||
log.exception(e)
|
||||
log.critical("Failed to decrypt backup. Did you provide the correct password? Did you point to the right backup path?")
|
||||
|
||||
def decrypt_with_key_file(self, key_file):
|
||||
def decrypt_with_key_file(self, key_file: str) -> None:
|
||||
"""Decrypts an encrypted iOS backup using a key file.
|
||||
|
||||
:param key_file: File to read the key bytes to decrypt the backup
|
||||
@@ -165,7 +180,7 @@ class DecryptBackup:
|
||||
log.exception(e)
|
||||
log.critical("Failed to decrypt backup. Did you provide the correct key file?")
|
||||
|
||||
def get_key(self):
|
||||
def get_key(self) -> None:
|
||||
"""Retrieve and prints the encryption key."""
|
||||
if not self._backup:
|
||||
return
|
||||
@@ -174,7 +189,7 @@ class DecryptBackup:
|
||||
log.info("Derived decryption key for backup at path %s is: \"%s\"",
|
||||
self.backup_path, self._decryption_key)
|
||||
|
||||
def write_key(self, key_path):
|
||||
def write_key(self, key_path: str) -> None:
|
||||
"""Save extracted key to file.
|
||||
|
||||
:param key_path: Path to the file where to write the derived decryption key.
|
||||
@@ -184,7 +199,7 @@ class DecryptBackup:
|
||||
return
|
||||
|
||||
try:
|
||||
with open(key_path, 'w') as handle:
|
||||
with open(key_path, 'w', encoding="utf-8") as handle:
|
||||
handle.write(self._decryption_key)
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
|
||||
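The refactor above moves per-file decryption into _process_file() so it can be dispatched to a worker pool; here is a self-contained sketch of the same Pool.apply_async() pattern with a stand-in task.

# Illustrative sketch, not part of the diff: the worker-pool pattern used by
# the refactored process_backup(), with a dummy task in place of
# DecryptBackup._process_file().
import multiprocessing

def decrypt_one(file_id: str) -> None:
    print(f"decrypting {file_id}")       # a real worker calls getFileDecryptedCopy()

if __name__ == "__main__":
    pool = multiprocessing.Pool(multiprocessing.cpu_count())
    for file_id in ["ab12", "cd34", "ef56"]:     # hypothetical backup file IDs
        pool.apply_async(decrypt_one, args=(file_id,))
    pool.close()
    pool.join()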
@@ -1,4 +1,4 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
import plistlib
|
||||
|
||||
from mvt.common.module import DatabaseNotFoundError
|
||||
from mvt.ios.versions import latest_ios_version
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
@@ -14,16 +16,17 @@ from ..base import IOSExtraction
|
||||
class BackupInfo(IOSExtraction):
|
||||
"""This module extracts information about the device and the backup."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = {}
|
||||
|
||||
def run(self):
|
||||
info_path = os.path.join(self.base_folder, "Info.plist")
|
||||
def run(self) -> None:
|
||||
info_path = os.path.join(self.target_path, "Info.plist")
|
||||
if not os.path.exists(info_path):
|
||||
raise DatabaseNotFoundError("No Info.plist at backup path, unable to extract device information")
|
||||
|
||||
@@ -41,3 +44,9 @@ class BackupInfo(IOSExtraction):
|
||||
value = info.get(field, None)
|
||||
self.log.info("%s: %s", field, value)
|
||||
self.results[field] = value
|
||||
|
||||
if "Product Version" in info:
|
||||
latest = latest_ios_version()
|
||||
if info["Product Version"] != latest["version"]:
|
||||
self.log.warning("This phone is running an outdated iOS version: %s (latest is %s)",
|
||||
info["Product Version"], latest['version'])
|
||||
|
||||
@@ -1,41 +1,106 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
import plistlib
|
||||
from base64 import b64encode
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CONF_PROFILES_DOMAIN = "SysSharedContainerDomain-systemgroup.com.apple.configurationprofiles"
|
||||
|
||||
|
||||
class ConfigurationProfiles(IOSExtraction):
|
||||
"""This module extracts the full plist data from configuration profiles."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
def serialize(self, record: dict) -> None:
|
||||
if not record["install_date"]:
|
||||
return
|
||||
|
||||
payload_name = record['plist'].get('PayloadDisplayName')
|
||||
payload_description = record['plist'].get('PayloadDescription')
|
||||
return {
|
||||
"timestamp": record["install_date"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "configuration_profile_install",
|
||||
"data": f"{record['plist']['PayloadType']} installed: {record['plist']['PayloadUUID']} - {payload_name}: {payload_description}"
|
||||
}
|
||||
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if result["plist"].get("PayloadUUID"):
|
||||
payload_content = result["plist"]["PayloadContent"][0]
|
||||
|
||||
# Alert on any known malicious configuration profiles in the indicator list.
|
||||
ioc = self.indicators.check_profile(result["plist"]["PayloadUUID"])
|
||||
if ioc:
|
||||
self.log.warning(f"Found a known malicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with UUID '{result['plist']['PayloadUUID']}'.")
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
# Highlight suspicious configuration profiles which may be used to hide notifications.
|
||||
if payload_content["PayloadType"] in ["com.apple.notificationsettings"]:
|
||||
self.log.warning(f"Found a potentially suspicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with payload type '{payload_content['PayloadType']}'.")
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self) -> None:
|
||||
for conf_file in self._get_backup_files_from_manifest(domain=CONF_PROFILES_DOMAIN):
|
||||
conf_rel_path = conf_file["relative_path"]
|
||||
# Filter out all configuration files that are not configuration profiles.
|
||||
if not conf_rel_path or not os.path.basename(conf_rel_path).startswith("profile-"):
|
||||
continue
|
||||
|
||||
conf_file_path = self._get_backup_file_from_id(conf_file["file_id"])
|
||||
if not conf_file_path:
|
||||
continue
|
||||
|
||||
with open(conf_file_path, "rb") as handle:
|
||||
conf_plist = plistlib.load(handle)
|
||||
|
||||
try:
|
||||
conf_plist = plistlib.load(handle)
|
||||
except Exception:
|
||||
conf_plist = {}
|
||||
if "SignerCerts" in conf_plist:
|
||||
conf_plist["SignerCerts"] = [b64encode(x) for x in conf_plist["SignerCerts"]]
|
||||
if "OTAProfileStub" in conf_plist:
|
||||
if "SignerCerts" in conf_plist["OTAProfileStub"]:
|
||||
conf_plist["OTAProfileStub"]["SignerCerts"] = [b64encode(x) for x in conf_plist["OTAProfileStub"]["SignerCerts"]]
|
||||
if "PayloadContent" in conf_plist["OTAProfileStub"]:
|
||||
if "EnrollmentIdentityPersistentID" in conf_plist["OTAProfileStub"]["PayloadContent"]:
|
||||
conf_plist["OTAProfileStub"]["PayloadContent"]["EnrollmentIdentityPersistentID"] = b64encode(conf_plist["OTAProfileStub"]["PayloadContent"]["EnrollmentIdentityPersistentID"])
|
||||
if "PushTokenDataSentToServerKey" in conf_plist:
|
||||
conf_plist["PushTokenDataSentToServerKey"] = b64encode(conf_plist["PushTokenDataSentToServerKey"])
|
||||
if "LastPushTokenHash" in conf_plist:
|
||||
conf_plist["LastPushTokenHash"] = b64encode(conf_plist["LastPushTokenHash"])
|
||||
if "PayloadContent" in conf_plist:
|
||||
for x in range(len(conf_plist["PayloadContent"])):
|
||||
if "PERSISTENT_REF" in conf_plist["PayloadContent"][x]:
|
||||
conf_plist["PayloadContent"][x]["PERSISTENT_REF"] = b64encode(conf_plist["PayloadContent"][x]["PERSISTENT_REF"])
|
||||
if "IdentityPersistentRef" in conf_plist["PayloadContent"][x]:
|
||||
conf_plist["PayloadContent"][x]["IdentityPersistentRef"] = b64encode(conf_plist["PayloadContent"][x]["IdentityPersistentRef"])
|
||||
|
||||
self.results.append({
|
||||
"file_id": conf_file["file_id"],
|
||||
"relative_path": conf_file["relative_path"],
|
||||
"domain": conf_file["domain"],
|
||||
"plist": conf_plist,
|
||||
"install_date": convert_timestamp_to_iso(conf_plist.get("InstallDate")),
|
||||
})
|
||||
|
||||
self.log.info("Extracted details about %d configuration profiles", len(self.results))
|
||||
|
||||
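The run() method above base64-encodes the binary plist fields one by one so the results can later be dumped as JSON; the generic recursive variant below is only a sketch of that idea, using nothing beyond the standard library.

# Illustrative sketch, not part of the diff: make a loaded plist
# JSON-serializable by base64-encoding every bytes value.
from base64 import b64encode

def encode_bytes_fields(obj):
    if isinstance(obj, bytes):
        return b64encode(obj).decode("ascii")
    if isinstance(obj, dict):
        return {key: encode_bytes_fields(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [encode_bytes_fields(value) for value in obj]
    return obj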
@@ -1,10 +1,11 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021 The MVT Project Authors.
|
||||
# Copyright (c) 2021-2022 Claudio Guarnieri.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import datetime
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import plistlib
|
||||
import sqlite3
|
||||
@@ -18,18 +19,19 @@ from ..base import IOSExtraction
|
||||
class Manifest(IOSExtraction):
|
||||
"""This module extracts information from a backup Manifest.db file."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
def __init__(self, file_path: str = None, target_path: str = None,
|
||||
results_path: str = None, fast_mode: bool = False,
|
||||
log: logging.Logger = None, results: list = []) -> None:
|
||||
super().__init__(file_path=file_path, target_path=target_path,
|
||||
results_path=results_path, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def _get_key(self, dictionary, key):
|
||||
"""Unserialized plist objects can have keys which are str or byte types
|
||||
This is a helper to try fetch a key as both a byte or string type.
|
||||
|
||||
:param dictionary: param key:
|
||||
:param key:
|
||||
:param dictionary:
|
||||
:param key:
|
||||
|
||||
"""
|
||||
return dictionary.get(key.encode("utf-8"), None) or dictionary.get(key, None)
|
||||
@@ -38,7 +40,7 @@ class Manifest(IOSExtraction):
|
||||
def _convert_timestamp(timestamp_or_unix_time_int):
|
||||
"""Older iOS versions stored the manifest times as unix timestamps.
|
||||
|
||||
:param timestamp_or_unix_time_int:
|
||||
:param timestamp_or_unix_time_int:
|
||||
|
||||
"""
|
||||
if isinstance(timestamp_or_unix_time_int, datetime.datetime):
|
||||
@@ -47,7 +49,7 @@ class Manifest(IOSExtraction):
|
||||
timestamp = datetime.datetime.utcfromtimestamp(timestamp_or_unix_time_int)
|
||||
return convert_timestamp_to_iso(timestamp)
|
||||
|
||||
def serialize(self, record):
|
||||
def serialize(self, record: dict) -> None:
|
||||
records = []
|
||||
if "modified" not in record or "status_changed" not in record:
|
||||
return
|
||||
@@ -67,14 +69,12 @@ class Manifest(IOSExtraction):
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
def check_indicators(self) -> None:
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if not "relative_path" in result:
|
||||
continue
|
||||
if not result["relative_path"]:
|
||||
if not result.get("relative_path"):
|
||||
continue
|
||||
|
||||
if result["domain"]:
|
||||
@@ -83,20 +83,19 @@ class Manifest(IOSExtraction):
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if self.indicators.check_file(result["relative_path"]):
|
||||
self.log.warning("Found a known malicious file at path: %s", result["relative_path"])
|
||||
if self.indicators.check_file_path("/" + result["relative_path"]):
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
relPath = result["relative_path"].lower()
|
||||
for ioc in self.indicators.ioc_domains:
|
||||
if ioc.lower() in relPath:
|
||||
rel_path = result["relative_path"].lower()
|
||||
for ioc in self.indicators.get_iocs("domains"):
|
||||
if ioc["value"].lower() in rel_path:
|
||||
self.log.warning("Found mention of domain \"%s\" in a backup file with path: %s",
|
||||
ioc, relPath)
|
||||
ioc["value"], rel_path)
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
|
||||
def run(self) -> None:
|
||||
manifest_db_path = os.path.join(self.target_path, "Manifest.db")
|
||||
if not os.path.isfile(manifest_db_path):
|
||||
raise DatabaseNotFoundError("unable to find backup's Manifest.db")
|
||||
|
||||
@@ -133,7 +132,7 @@ class Manifest(IOSExtraction):
|
||||
"owner": self._get_key(file_metadata, "UserID"),
|
||||
"size": self._get_key(file_metadata, "Size"),
|
||||
})
|
||||
except:
|
||||
except Exception:
|
||||
self.log.exception("Error reading manifest file metadata for file with ID %s and relative path %s",
|
||||
file_data["fileID"], file_data["relativePath"])
|
||||
pass
|
||||
|
||||
@@ -1,8 +1,9 @@
 # Mobile Verification Toolkit (MVT)
-# Copyright (c) 2021 The MVT Project Authors.
+# Copyright (c) 2021-2022 Claudio Guarnieri.
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

+import logging
 import plistlib

 from mvt.common.utils import convert_timestamp_to_iso
@@ -11,50 +12,89 @@ from ..base import IOSExtraction

 CONF_PROFILES_EVENTS_RELPATH = "Library/ConfigurationProfiles/MCProfileEvents.plist"


 class ProfileEvents(IOSExtraction):
     """This module extracts events related to the installation of configuration
     profiles.


     """

-    def __init__(self, file_path=None, base_folder=None, output_folder=None,
-                 fast_mode=False, log=None, results=[]):
-        super().__init__(file_path=file_path, base_folder=base_folder,
-                         output_folder=output_folder, fast_mode=fast_mode,
+    def __init__(self, file_path: str = None, target_path: str = None,
+                 results_path: str = None, fast_mode: bool = False,
+                 log: logging.Logger = None, results: list = []) -> None:
+        super().__init__(file_path=file_path, target_path=target_path,
+                         results_path=results_path, fast_mode=fast_mode,
                          log=log, results=results)

-    def serialize(self, record):
+    def serialize(self, record: dict) -> None:
         return {
             "timestamp": record.get("timestamp"),
             "module": self.__class__.__name__,
             "event": "profile_operation",
-            "data": f"Process {record.get('process')} started operation {record.get('operation')} of profile {record.get('profile_id')}"
+            "data": f"Process {record.get('process')} started operation " \
+                    f"{record.get('operation')} of profile {record.get('profile_id')}"
         }

-    def run(self):
+    def check_indicators(self) -> None:
+        if not self.indicators:
+            return
+
+        for result in self.results:
+            ioc = self.indicators.check_process(result.get("process"))
+            if ioc:
+                result["matched_indicator"] = ioc
+                self.detected.append(result)
+                continue
+
+            ioc = self.indicators.check_profile(result.get("profile_id"))
+            if ioc:
+                result["matched_indicator"] = ioc
+                self.detected.append(result)
+
+    @staticmethod
+    def parse_profile_events(file_data: bytes) -> list:
+        results = []
+
+        events_plist = plistlib.loads(file_data)
+
+        if "ProfileEvents" not in events_plist:
+            return results
+
+        for event in events_plist["ProfileEvents"]:
+            key = list(event.keys())[0]
+
+            result = {
+                "profile_id": key,
+                "timestamp": "",
+                "operation": "",
+                "process": "",
+            }
+
+            for key, value in event[key].items():
+                key = key.lower()
+                if key == "timestamp":
+                    result["timestamp"] = str(convert_timestamp_to_iso(value))
+                else:
+                    result[key] = value
+
+            results.append(result)
+
+        return results
+
+    def run(self) -> None:
         for events_file in self._get_backup_files_from_manifest(relative_path=CONF_PROFILES_EVENTS_RELPATH):
             events_file_path = self._get_backup_file_from_id(events_file["file_id"])
             if not events_file_path:
                 continue

             self.log.info("Found MCProfileEvents.plist file at %s", events_file_path)

             with open(events_file_path, "rb") as handle:
-                events_plist = plistlib.load(handle)
+                self.results.extend(self.parse_profile_events(handle.read()))

-            if "ProfileEvents" not in events_plist:
-                continue
-
-            for event in events_plist["ProfileEvents"]:
-                key = list(event.keys())[0]
-                self.log.info("On %s process \"%s\" started operation \"%s\" of profile \"%s\"",
-                              event[key].get("timestamp"), event[key].get("process"),
-                              event[key].get("operation"), key)
-
-                self.results.append({
-                    "profile_id": key,
-                    "timestamp": convert_timestamp_to_iso(event[key].get("timestamp")),
-                    "operation": event[key].get("operation"),
-                    "process": event[key].get("process"),
-                })
+        for result in self.results:
+            self.log.info("On %s process \"%s\" started operation \"%s\" of profile \"%s\"",
+                          result.get("timestamp"), result.get("process"),
+                          result.get("operation"), result.get("profile_id"))

         self.log.info("Extracted %d profile events", len(self.results))
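As a side note, the parsing introduced in parse_profile_events() can be exercised on its own with plistlib. The snippet below fabricates a tiny MCProfileEvents-style payload purely for illustration; real backups carry richer values (for instance, datetime objects for timestamps).

import plistlib

# Fabricated payload shaped like the "ProfileEvents" structure parsed above.
sample = plistlib.dumps({
    "ProfileEvents": [
        {"com.example.profile": {
            "timestamp": "2022-01-01 10:00:00",
            "operation": "install",
            "process": "mdmd",
        }},
    ],
})

events_plist = plistlib.loads(sample)
for event in events_plist.get("ProfileEvents", []):
    profile_id = list(event.keys())[0]
    details = event[profile_id]
    print(profile_id, details.get("operation"), details.get("process"))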
@@ -1,9 +1,10 @@
 # Mobile Verification Toolkit (MVT)
-# Copyright (c) 2021 The MVT Project Authors.
+# Copyright (c) 2021-2022 Claudio Guarnieri.
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

 import glob
+import logging
 import os
 import shutil
 import sqlite3
@@ -16,37 +17,39 @@ from mvt.common.module import (DatabaseCorruptedError, DatabaseNotFoundError,
 class IOSExtraction(MVTModule):
     """This class provides a base for all iOS filesystem/backup extraction modules."""

-    def __init__(self, file_path=None, base_folder=None, output_folder=None,
-                 fast_mode=False, log=None, results=[]):
-        super().__init__(file_path=file_path, base_folder=base_folder,
-                         output_folder=output_folder, fast_mode=fast_mode,
+    def __init__(self, file_path: str = None, target_path: str = None,
+                 results_path: str = None, fast_mode: bool = False,
+                 log: logging.Logger = None, results: list = []) -> None:
+        super().__init__(file_path=file_path, target_path=target_path,
+                         results_path=results_path, fast_mode=fast_mode,
                          log=log, results=results)

         self.is_backup = False
         self.is_fs_dump = False
         self.is_sysdiagnose = False

-    def _recover_sqlite_db_if_needed(self, file_path):
+    def _recover_sqlite_db_if_needed(self, file_path, forced=False):
         """Tries to recover a malformed database by running a .clone command.

         :param file_path: Path to the malformed database file.

         """
         # TODO: Find a better solution.
-        conn = sqlite3.connect(file_path)
-        cur = conn.cursor()
+        if not forced:
+            conn = sqlite3.connect(file_path)
+            cur = conn.cursor()

-        try:
-            recover = False
-            cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
-        except sqlite3.DatabaseError as e:
-            if "database disk image is malformed" in str(e):
-                recover = True
-        finally:
-            conn.close()
+            try:
+                recover = False
+                cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
+            except sqlite3.DatabaseError as e:
+                if "database disk image is malformed" in str(e):
+                    recover = True
+            finally:
+                conn.close()

-        if not recover:
-            return
+            if not recover:
+                return

         self.log.info("Database at path %s is malformed. Trying to recover...", file_path)

@@ -72,7 +75,7 @@ class IOSExtraction(MVTModule):
         :param domain: Domain to use as filter from Manifest.db. (Default value = None)

         """
-        manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
+        manifest_db_path = os.path.join(self.target_path, "Manifest.db")
         if not os.path.exists(manifest_db_path):
             raise DatabaseNotFoundError("unable to find backup's Manifest.db")

@@ -100,7 +103,7 @@ class IOSExtraction(MVTModule):
         }

     def _get_backup_file_from_id(self, file_id):
-        file_path = os.path.join(self.base_folder, file_id[0:2], file_id)
+        file_path = os.path.join(self.target_path, file_id[0:2], file_id)
         if os.path.exists(file_path):
             return file_path

@@ -108,7 +111,7 @@ class IOSExtraction(MVTModule):

     def _get_fs_files_from_patterns(self, root_paths):
         for root_path in root_paths:
-            for found_path in glob.glob(os.path.join(self.base_folder, root_path)):
+            for found_path in glob.glob(os.path.join(self.target_path, root_path)):
                 if not os.path.exists(found_path):
                     continue

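A minimal sketch of the malformed-database probe shown above, stripped of the module plumbing. The file path is illustrative (sqlite3 will simply create an empty database if the file does not exist), and the actual .clone-based recovery step is not reproduced here.

import sqlite3

def needs_recovery(file_path: str) -> bool:
    """Return True if SQLite reports the database image as malformed."""
    conn = sqlite3.connect(file_path)
    try:
        conn.execute("SELECT name FROM sqlite_master WHERE type='table';")
        return False
    except sqlite3.DatabaseError as exc:
        return "database disk image is malformed" in str(exc)
    finally:
        conn.close()

print(needs_recovery("example.db"))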
@@ -1,12 +1,13 @@
 # Mobile Verification Toolkit (MVT)
-# Copyright (c) 2021 The MVT Project Authors.
+# Copyright (c) 2021-2022 Claudio Guarnieri.
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

+from .analytics import Analytics
+from .analytics_ios_versions import AnalyticsIOSVersions
 from .cache_files import CacheFiles
 from .filesystem import Filesystem
 from .net_netusage import Netusage
-from .networking_analytics import NetworkingAnalytics
 from .safari_favicon import SafariFavicon
 from .shutdownlog import ShutdownLog
 from .version_history import IOSVersionHistory
@@ -14,6 +15,6 @@ from .webkit_indexeddb import WebkitIndexedDB
 from .webkit_localstorage import WebkitLocalStorage
 from .webkit_safariviewservice import WebkitSafariViewService

-FS_MODULES = [CacheFiles, Filesystem, Netusage, NetworkingAnalytics, SafariFavicon, ShutdownLog,
-              IOSVersionHistory, WebkitIndexedDB, WebkitLocalStorage,
-              WebkitSafariViewService,]
+FS_MODULES = [CacheFiles, Filesystem, Netusage, Analytics, AnalyticsIOSVersions,
+              SafariFavicon, ShutdownLog, IOSVersionHistory, WebkitIndexedDB,
+              WebkitLocalStorage, WebkitSafariViewService]
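Illustrative only: one way a registry list like FS_MODULES can be consumed by a caller. The constructor keywords mirror the signatures in this diff, but the loop itself is not MVT's actual CLI code, and a real run would also load indicator files before calling check_indicators().

import logging

def run_fs_modules(modules: list, target_path: str) -> None:
    log = logging.getLogger("example")
    for module_class in modules:
        # Pass results=[] explicitly so modules do not share the mutable default.
        module = module_class(target_path=target_path, log=log, results=[])
        module.run()
        module.check_indicators()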
mvt/ios/modules/fs/analytics.py (new file, 130 lines)
@@ -0,0 +1,130 @@
+# Mobile Verification Toolkit (MVT)
+# Copyright (c) 2021-2022 Claudio Guarnieri.
+# Use of this software is governed by the MVT License 1.1 that can be found at
+# https://license.mvt.re/1.1/
+
+import logging
+import plistlib
+import sqlite3
+
+from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
+
+from ..base import IOSExtraction
+
+ANALYTICS_DB_PATH = [
+    "private/var/Keychains/Analytics/*.db",
+]
+
+
+class Analytics(IOSExtraction):
+    """This module extracts information from the private/var/Keychains/Analytics/*.db files."""
+
+    def __init__(self, file_path: str = None, target_path: str = None,
+                 results_path: str = None, fast_mode: bool = False,
+                 log: logging.Logger = None, results: list = []) -> None:
+        super().__init__(file_path=file_path, target_path=target_path,
+                         results_path=results_path, fast_mode=fast_mode,
+                         log=log, results=results)
+
+    def serialize(self, record: dict) -> None:
+        return {
+            "timestamp": record["isodate"],
+            "module": self.__class__.__name__,
+            "event": record["artifact"],
+            "data": f"{record}",
+        }
+
+    def check_indicators(self) -> None:
+        if not self.indicators:
+            return
+
+        for result in self.results:
+            for value in result.values():
+                if not isinstance(value, str):
+                    continue
+
+                ioc = self.indicators.check_process(value)
+                if ioc:
+                    self.log.warning("Found mention of a malicious process \"%s\" in %s file at %s",
+                                     value, result["artifact"], result["timestamp"])
+                    result["matched_indicator"] = ioc
+                    self.detected.append(result)
+                    continue
+
+                ioc = self.indicators.check_domain(value)
+                if ioc:
+                    self.log.warning("Found mention of a malicious domain \"%s\" in %s file at %s",
+                                     value, result["artifact"], result["timestamp"])
+                    result["matched_indicator"] = ioc
+                    self.detected.append(result)
+
+    def _extract_analytics_data(self):
+        artifact = self.file_path.split("/")[-1]
+
+        conn = sqlite3.connect(self.file_path)
+        cur = conn.cursor()
+
+        try:
+            cur.execute("""
+                SELECT
+                    timestamp,
+                    data
+                FROM hard_failures
+                UNION
+                SELECT
+                    timestamp,
+                    data
+                FROM soft_failures
+                UNION
+                SELECT
+                    timestamp,
+                    data
+                FROM all_events;
+            """)
+        except sqlite3.OperationalError:
+            cur.execute("""
+                SELECT
+                    timestamp,
+                    data
+                FROM hard_failures
+                UNION
+                SELECT
+                    timestamp,
+                    data
+                FROM soft_failures;
+            """)
+
+        for row in cur:
+            if row[0] and row[1]:
+                isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0], False))
+                data = plistlib.loads(row[1])
+                data["isodate"] = isodate
+            elif row[0]:
+                isodate = convert_timestamp_to_iso(convert_mactime_to_unix(row[0], False))
+                data = {}
+                data["isodate"] = isodate
+            elif row[1]:
+                isodate = ""
+                data = plistlib.loads(row[1])
+                data["isodate"] = isodate
+
+            data["artifact"] = artifact
+
+            self.results.append(data)
+
+        cur.close()
+        conn.close()
+
+    def process_analytics_dbs(self):
+        for file_path in self._get_fs_files_from_patterns(ANALYTICS_DB_PATH):
+            self.file_path = file_path
+            self.log.info("Found Analytics database file at path: %s", file_path)
+            self._extract_analytics_data()
+
+    def run(self) -> None:
+        self.process_analytics_dbs()
+
+        self.log.info("Extracted %d records from analytics databases",
+                      len(self.results))
+
+        self.results = sorted(self.results, key=lambda entry: entry["isodate"])
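The `data` column read above is a binary plist, so decoding one row boils down to plistlib plus the timestamp conversion. The snippet below fabricates a blob purely for illustration and leaves out the convert_mactime_to_unix() helper the module uses.

import plistlib

# Fabricated stand-in for one row's `data` blob from an Analytics database.
sample_blob = plistlib.dumps({"process": "demo-process", "build": "19C56"})

record = plistlib.loads(sample_blob)
record["artifact"] = "Certificates.db"  # illustrative artifact (database file) name
print(record.get("process"), record.get("build"))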
mvt/ios/modules/fs/analytics_ios_versions.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+# Mobile Verification Toolkit (MVT)
+# Copyright (c) 2021-2022 Claudio Guarnieri.
+# Use of this software is governed by the MVT License 1.1 that can be found at
+# https://license.mvt.re/1.1/
+
+import logging
+from datetime import datetime
+
+from mvt.ios.versions import find_version_by_build
+
+from ..base import IOSExtraction
+from .analytics import Analytics
+
+
+class AnalyticsIOSVersions(IOSExtraction):
+    """This module leverages the Analytics module in order to extract
+    a timeline of build numbers from the private/var/Keychains/Analytics/*.db
+    files."""
+
+    def __init__(self, file_path: str = None, target_path: str = None,
+                 results_path: str = None, fast_mode: bool = False,
+                 log: logging.Logger = None, results: list = []) -> None:
+        super().__init__(file_path=file_path, target_path=target_path,
+                         results_path=results_path, fast_mode=fast_mode,
+                         log=log, results=results)
+
+    def serialize(self, record: dict) -> None:
+        return {
+            "timestamp": record["isodate"],
+            "module": self.__class__.__name__,
+            "event": "analytics_ios_version",
+            "data": f"Seen iOS version {record['version']} ({record['build']})",
+        }
+
+    def run(self):
+        anl = Analytics(target_path=self.target_path, log=self.log)
+        anl.process_analytics_dbs()
+
+        dt_format = "%Y-%m-%d %H:%M:%S.%f"
+
+        builds = {}
+        for result in anl.results:
+            build = result.get("build")
+            if not build:
+                continue
+
+            ts = result.get("isodate", None)
+            if not ts:
+                continue
+
+            if build not in builds.keys():
+                builds[build] = ts
+                continue
+
+            result_dt = datetime.strptime(ts, dt_format)
+            cur_dt = datetime.strptime(builds[build], dt_format)
+
+            if result_dt < cur_dt:
+                builds[build] = ts
+
+        for build, ts in builds.items():
+            version = find_version_by_build(build)
+
+            self.results.append({
+                "isodate": ts,
+                "build": build,
+                "version": version,
+            })
+
+        self.results = sorted(self.results, key=lambda entry: entry["isodate"])
+        for result in self.results:
+            self.log.info("iOS version %s (%s) first appeared on %s",
+                          result["version"], result["build"], result["isodate"])
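The core of run() above is a small "earliest sighting per build" reduction. Here it is in isolation, with fabricated records in the same timestamp format used by the module.

from datetime import datetime

DT_FORMAT = "%Y-%m-%d %H:%M:%S.%f"

records = [
    {"build": "19C56", "isodate": "2022-01-10 08:00:00.000000"},
    {"build": "19C56", "isodate": "2021-12-20 09:30:00.000000"},
    {"build": "19D50", "isodate": "2022-02-01 12:00:00.000000"},
]

first_seen = {}
for record in records:
    build, ts = record["build"], record["isodate"]
    # Keep only the earliest timestamp seen for each build number.
    if (build not in first_seen
            or datetime.strptime(ts, DT_FORMAT) < datetime.strptime(first_seen[build], DT_FORMAT)):
        first_seen[build] = ts

# {'19C56': '2021-12-20 09:30:00.000000', '19D50': '2022-02-01 12:00:00.000000'}
print(first_seen)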
@@ -1,8 +1,9 @@
 # Mobile Verification Toolkit (MVT)
-# Copyright (c) 2021 The MVT Project Authors.
+# Copyright (c) 2021-2022 Claudio Guarnieri.
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

+import logging
 import os
 import sqlite3

@@ -11,13 +12,14 @@ from ..base import IOSExtraction

 class CacheFiles(IOSExtraction):

-    def __init__(self, file_path=None, base_folder=None, output_folder=None,
-                 fast_mode=False, log=None, results=[]):
-        super().__init__(file_path=file_path, base_folder=base_folder,
-                         output_folder=output_folder, fast_mode=fast_mode,
+    def __init__(self, file_path: str = None, target_path: str = None,
+                 results_path: str = None, fast_mode: bool = False,
+                 log: logging.Logger = None, results: list = []) -> None:
+        super().__init__(file_path=file_path, target_path=target_path,
+                         results_path=results_path, fast_mode=fast_mode,
                          log=log, results=results)

-    def serialize(self, record):
+    def serialize(self, record: dict) -> None:
         records = []
         for item in self.results[record]:
             records.append({
@@ -29,18 +31,20 @@ class CacheFiles(IOSExtraction):

         return records

-    def check_indicators(self):
+    def check_indicators(self) -> None:
         if not self.indicators:
             return

         self.detected = {}
-        for key, items in self.results.items():
-            for item in items:
-                if self.indicators.check_domain(item["url"]):
+        for key, values in self.results.items():
+            for value in values:
+                ioc = self.indicators.check_domain(value["url"])
+                if ioc:
+                    value["matched_indicator"] = ioc
                     if key not in self.detected:
-                        self.detected[key] = [item,]
+                        self.detected[key] = [value, ]
                     else:
-                        self.detected[key].append(item)
+                        self.detected[key].append(value)

     def _process_cache_file(self, file_path):
         self.log.info("Processing cache file at path: %s", file_path)
@@ -53,8 +57,8 @@ class CacheFiles(IOSExtraction):
         except sqlite3.OperationalError:
             return

-        key_name = os.path.relpath(file_path, self.base_folder)
-        if not key_name in self.results:
+        key_name = os.path.relpath(file_path, self.target_path)
+        if key_name not in self.results:
             self.results[key_name] = []

         for row in cur:
@@ -67,9 +71,9 @@ class CacheFiles(IOSExtraction):
                 "isodate": row[5],
             })

-    def run(self):
+    def run(self) -> None:
         self.results = {}
-        for root, dirs, files in os.walk(self.base_folder):
+        for root, dirs, files in os.walk(self.target_path):
             for file_name in files:
                 if file_name != "Cache.db":
                     continue

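A small sketch of the per-file grouping that check_indicators() performs above: matches are bucketed under the cache file they came from. The matcher and data here are fabricated; MVT's real check goes through indicators.check_domain().

def group_detections(results: dict, bad_domains: set) -> dict:
    detected = {}
    for key, values in results.items():
        for value in values:
            # Stand-in for indicators.check_domain(value["url"]).
            if value["url"] in bad_domains:
                detected.setdefault(key, []).append(value)
    return detected

print(group_detections(
    {"Library/Caches/com.example.app/Cache.db": [{"url": "evil.example.com"}]},
    {"evil.example.com"},
))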
@@ -1,9 +1,10 @@
 # Mobile Verification Toolkit (MVT)
-# Copyright (c) 2021 The MVT Project Authors.
+# Copyright (c) 2021-2022 Claudio Guarnieri.
 # Use of this software is governed by the MVT License 1.1 that can be found at
 # https://license.mvt.re/1.1/

 import datetime
+import logging
 import os

 from mvt.common.utils import convert_timestamp_to_iso
@@ -18,13 +19,14 @@ class Filesystem(IOSExtraction):

     """

-    def __init__(self, file_path=None, base_folder=None, output_folder=None,
-                 fast_mode=False, log=None, results=[]):
-        super().__init__(file_path=file_path, base_folder=base_folder,
-                         output_folder=output_folder, fast_mode=fast_mode,
+    def __init__(self, file_path: str = None, target_path: str = None,
+                 results_path: str = None, fast_mode: bool = False,
+                 log: logging.Logger = None, results: list = []) -> None:
+        super().__init__(file_path=file_path, target_path=target_path,
+                         results_path=results_path, fast_mode=fast_mode,
                          log=log, results=results)

-    def serialize(self, record):
+    def serialize(self, record: dict) -> None:
         return {
             "timestamp": record["modified"],
             "module": self.__class__.__name__,
@@ -32,35 +34,41 @@ class Filesystem(IOSExtraction):
             "data": record["path"],
         }

-    def check_indicators(self):
+    def check_indicators(self) -> None:
         if not self.indicators:
             return

         for result in self.results:
-            if self.indicators.check_file(result["path"]):
-                self.log.warning("Found a known malicious file at path: %s", result["path"])
+            if "path" not in result:
+                continue
+
+            ioc = self.indicators.check_file_path(result["path"])
+            if ioc:
+                result["matched_indicator"] = ioc
                 self.detected.append(result)

-            # If we are instructed to run fast, we skip this.
+            # If we are instructed to run fast, we skip the rest.
             if self.fast_mode:
                 self.log.info("Flag --fast was enabled: skipping extended search for suspicious files/processes")
             else:
-                for ioc in self.indicators.ioc_processes:
-                    parts = result["path"].split("/")
-                    if ioc in parts:
-                        self.log.warning("Found a known malicious file/process at path: %s", result["path"])
-                        self.detected.append(result)
-                        continue
-
-    def run(self):
-        for root, dirs, files in os.walk(self.base_folder):
+                for ioc in self.indicators.get_iocs("processes"):
+                    parts = result["path"].split("/")
+                    if ioc["value"] in parts:
+                        self.log.warning("Found known suspicious process name mentioned in file at path \"%s\" matching indicators from \"%s\"",
+                                         result["path"], ioc["name"])
+                        result["matched_indicator"] = ioc
+                        self.detected.append(result)
+
+    def run(self) -> None:
+        for root, dirs, files in os.walk(self.target_path):
             for dir_name in dirs:
                 try:
                     dir_path = os.path.join(root, dir_name)
                     result = {
-                        "path": os.path.relpath(dir_path, self.base_folder),
+                        "path": os.path.relpath(dir_path, self.target_path),
                         "modified": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(dir_path).st_mtime)),
                     }
-                except:
+                except Exception:
                     continue
                 else:
                     self.results.append(result)
@@ -69,10 +77,10 @@ class Filesystem(IOSExtraction):
                 try:
                     file_path = os.path.join(root, file_name)
                     result = {
-                        "path": os.path.relpath(file_path, self.base_folder),
+                        "path": os.path.relpath(file_path, self.target_path),
                         "modified": convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(os.stat(file_path).st_mtime)),
                     }
-                except:
+                except Exception:
                     continue
                 else:
                     self.results.append(result)

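For reference, the timeline built by run() above reduces to a plain os.walk() pass that records each entry's relative path and modification time. The standalone sketch below walks the current directory and uses isoformat() instead of MVT's convert_timestamp_to_iso() helper.

import datetime
import os

def build_timeline(target_path: str) -> list:
    records = []
    for root, dirs, files in os.walk(target_path):
        for name in dirs + files:
            try:
                entry_path = os.path.join(root, name)
                mtime = os.stat(entry_path).st_mtime
                records.append({
                    "path": os.path.relpath(entry_path, target_path),
                    "modified": datetime.datetime.utcfromtimestamp(mtime).isoformat(),
                })
            except OSError:
                # Broken symlinks or permission errors are simply skipped.
                continue
    return records

print(len(build_timeline(".")))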
Some files were not shown because too many files have changed in this diff.