Mirror of https://github.com/mvt-project/mvt.git, synced 2026-02-14 09:32:51 +00:00.
Compare commits — 602 commits (v1.5.3 ... dependabot)
.github/dependabot.yml (new file)
@@ -0,0 +1,11 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file

version: 2
updates:
  - package-ecosystem: "pip" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "weekly"
.github/workflows/add-issue-to-project.yml (new file)
@@ -0,0 +1,19 @@
name: Add issue to project

on:
  issues:
    types:
      - opened
      - reopened

jobs:
  add-to-project:
    name: Add issue to project
    runs-on: ubuntu-latest
    steps:
      - uses: actions/add-to-project@v0.5.0
        with:
          # You can target a project in a different organization
          # to the issue
          project-url: https://github.com/orgs/mvt-project/projects/1
          github-token: ${{ secrets.ADD_TO_PROJECT_PAT }}
.github/workflows/mypy.yml (new file)
@@ -0,0 +1,23 @@
name: Mypy
on: workflow_dispatch

jobs:
  mypy_py3:
    name: Mypy check
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          cache: 'pip'
      - name: Checkout
        uses: actions/checkout@master
      - name: Install Dependencies
        run: |
          pip install mypy
      - name: mypy
        run: |
          make mypy
.github/workflows/publish-release-docker.yml (new file)
@@ -0,0 +1,61 @@
#
name: Create and publish a Docker image

# Configures this workflow to run every time a release is published.
on:
  workflow_dispatch:
  release:
    types: [published]

# Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

# There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu.
jobs:
  build-and-push-image:
    runs-on: ubuntu-latest
    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job.
    permissions:
      contents: read
      packages: write
      attestations: write
      id-token: write
    #
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
      # Uses the `docker/login-action` action to log in to the Container registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here.
      - name: Log in to the Container registry
        uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels.
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages.
      # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository.
      # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step.
      - name: Build and push Docker image
        id: push
        uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)."
      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v1
        with:
          subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}}
          subject-digest: ${{ steps.push.outputs.digest }}
          push-to-registry: true
.github/workflows/python-package.yml (deleted)
@@ -1,43 +0,0 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: CI

on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        # python-version: [3.7, 3.8, 3.9]
        python-version: [3.8, 3.9]

    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          python -m pip install flake8 pytest safety stix2
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          python -m pip install .
      - name: Lint with flake8
        run: |
          # stop the build if there are Python syntax errors or undefined names
          flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
          flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
      - name: Safety checks
        run: safety check
      - name: Test with pytest
        run: pytest
.github/workflows/ruff.yml (new file)
@@ -0,0 +1,27 @@
name: Ruff
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  ruff_py3:
    name: Ruff syntax check
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: 3.9
          cache: 'pip'
      - name: Checkout
        uses: actions/checkout@master
      - name: Install Dependencies
        run: |
          pip install ruff
      - name: ruff
        run: |
          make ruff
.github/workflows/scripts/update-ios-releases.py (new file)
@@ -0,0 +1,96 @@
"""
Python script to download the Apple RSS feed and parse it.
"""

import json
import os
import urllib.request
from xml.dom.minidom import parseString

from packaging import version


def download_apple_rss(feed_url):
    with urllib.request.urlopen(feed_url) as f:
        rss_feed = f.read().decode("utf-8")
    print("Downloaded RSS feed from Apple.")
    return rss_feed


def parse_latest_ios_versions(rss_feed_text):
    latest_ios_versions = []

    parsed_feed = parseString(rss_feed_text)
    for item in parsed_feed.getElementsByTagName("item"):
        title = item.getElementsByTagName("title")[0].firstChild.data
        if not title.startswith("iOS"):
            continue

        import re

        build_match = re.match(
            r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)", title
        )
        if not build_match:
            print("Could not parse iOS build:", title)
            continue

        # Handle iOS beta releases
        release_info = build_match.groupdict()
        release_beta = release_info.pop("beta")
        if release_beta:
            print("Skipping beta release:", title)
            continue

        # Some iOS releases have multiple build number for different hardware models.
        # We will split these into separate entries and record each build number.
        build_list = release_info.pop("build")
        build_variants = build_list.split(" | ")
        for build_number in build_variants:
            release_info["build"] = build_number
            latest_ios_versions.append(release_info)

    return latest_ios_versions


def update_mvt(mvt_checkout_path, latest_ios_versions):
    version_path = os.path.join(mvt_checkout_path, "src/mvt/ios/data/ios_versions.json")
    with open(version_path, "r") as version_file:
        current_versions = json.load(version_file)

    new_entry_count = 0
    for new_version in latest_ios_versions:
        for current_version in current_versions:
            if new_version["build"] == current_version["build"]:
                break
        else:
            # New version that does not exist in current data
            current_versions.append(new_version)
            new_entry_count += 1

    if not new_entry_count:
        print("No new iOS versions found.")
    else:
        print("Found {} new iOS versions.".format(new_entry_count))
        new_version_list = sorted(
            current_versions, key=lambda x: version.Version(x["version"])
        )
        with open(version_path, "w") as version_file:
            json.dump(new_version_list, version_file, indent=4)


def main():
    print("Downloading RSS feed...")
    mvt_checkout_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "../../../")
    )

    rss_feed = download_apple_rss(
        "https://developer.apple.com/news/releases/rss/releases.rss"
    )
    latest_ios_version = parse_latest_ios_versions(rss_feed)
    update_mvt(mvt_checkout_path, latest_ios_version)


if __name__ == "__main__":
    main()
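
For illustration, here is a minimal, self-contained sketch of how the regular expression in the script above behaves on typical Apple RSS titles, and how `packaging.version` orders the resulting entries before they are written back. The sample titles are invented for this example and are not taken from the live feed.

```python
import re

from packaging import version

# Same pattern as in update-ios-releases.py above.
PATTERN = r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)"

# Hypothetical feed titles, made up for this example.
titles = [
    "iOS 17.1.2 (21B101)",
    "iOS 17.2 beta (21C5029e)",
    "iOS 16.7.2 (20H115 | 20H120)",
]

entries = []
for title in titles:
    match = re.match(PATTERN, title)
    if not match or match.group("beta"):
        # Unparsable titles and beta releases are skipped, as in the script.
        continue
    info = match.groupdict()
    # Several builds separated by " | " become separate entries.
    for build in info.pop("build").split(" | "):
        entries.append({"version": info["version"], "build": build})

# Sort numerically by version, as update_mvt() does.
entries.sort(key=lambda x: version.Version(x["version"]))
print(entries)
```

Running the sketch prints the 16.7.2 builds before 17.1.2, which matches the numeric ordering the workflow relies on when it rewrites `ios_versions.json`.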
.github/workflows/tests.yml (new file)
@@ -0,0 +1,38 @@
name: Tests
on:
  push:
    branches: [ main ]
  pull_request:
    branches: [ main ]

jobs:
  build:
    name: Run Python Tests
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ['3.10', '3.11', '3.12', '3.13', '3.14']

    steps:
      - uses: actions/checkout@v4
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install Python dependencies
        run: |
          make install
          make test-requirements
      - name: Test with pytest
        run: |
          set -o pipefail
          make test-ci | tee pytest-coverage.txt

      - name: Pytest coverage comment
        continue-on-error: true # Workflows running on a fork can't post comments
        uses: MishaKav/pytest-coverage-comment@main
        if: github.event_name == 'pull_request'
        with:
          pytest-coverage-path: ./pytest-coverage.txt
          junitxml-path: ./pytest.xml
.github/workflows/update-ios-data.yml (new file)
@@ -0,0 +1,30 @@
name: Update iOS releases and version numbers
run-name: ${{ github.actor }} is finding the latest iOS release version and build numbers
on:
  workflow_dispatch:
  schedule:
    # * is a special character in YAML so you have to quote this string
    - cron: '0 */6 * * *'


jobs:
  update-ios-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - name: Run script to fetch latest iOS releases from Apple RSS feed.
        run: python3 .github/workflows/scripts/update-ios-releases.py
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5
        with:
          title: '[auto] Update iOS releases and versions'
          commit-message: Add new iOS versions and build numbers
          branch: auto/add-new-ios-releases
          draft: true
          body: |
            This is an automated pull request to update the iOS releases and version numbers.
          add-paths: |
            *.json
          labels: |
            automated pr
.gitignore (modified)
@@ -50,6 +50,8 @@ coverage.xml
*.py,cover
.hypothesis/
.pytest_cache/
pytest-coverage.txt
pytest.xml

# Translations
*.mo
@@ -133,4 +135,7 @@ dmypy.json
*~

# IDEA Dev Environment
.idea
.idea

# Sublime Text project files
*.sublime*
(Read the Docs build configuration, modified)
@@ -5,11 +5,15 @@
# Required
version: 2

build:
  os: "ubuntu-22.04"
  tools:
    python: "3.11"

mkdocs:
  configuration: mkdocs.yml

# Optionally set the version of Python and requirements required to build your docs
python:
  version: 3.7
  install:
    - requirements: docs/requirements.txt
.safety-policy.yml (new file)
@@ -0,0 +1,11 @@
# Safety Security and License Configuration file
# We recommend checking this file into your source control in the root of your Python project
# If this file is named .safety-policy.yml and is in the same directory where you run `safety check` it will be used by default.
# Otherwise, you can use the flag `safety check --policy-file <path-to-this-file>` to specify a custom location and name for the file.
# To validate and review your policy file, run the validate command: `safety validate policy_file --path <path-to-this-file>`
security: # configuration for the `safety check` command
    ignore-vulnerabilities: # Here you can list multiple specific vulnerabilities you want to ignore (optionally for a time period)
        67599: # Example vulnerability ID
            reason: disputed, inapplicable
        70612:
            reason: disputed, inapplicable
AUTHORS (deleted)
@@ -1,7 +0,0 @@
MVT was originally authored by Claudio Guarnieri <nex@nex.sx>.

For an up-to-date list of all contributors visit:
https://github.com/mvt-project/mvt/graphs/contributors

Or run:
git shortlog -s -n
CONTRIBUTING.md (new file)
@@ -0,0 +1,65 @@
# Contributing to Mobile Verification Toolkit (MVT)

We greatly appreciate contributions to MVT!

Your involvement, whether through identifying issues, improving functionality, or enhancing documentation, is very much appreciated. To ensure smooth collaboration and a welcoming environment, we've outlined some key guidelines for contributing below.

## Getting started

Contributing to an open-source project like MVT might seem overwhelming at first, but we're here to support you!

Whether you're a technologist, a frontline human rights defender, a field researcher, or someone new to consensual spyware forensics, there are many ways to make meaningful contributions.

Here's how you can get started:

1. **Explore the codebase:**
   - Browse the repository to get familiar with MVT. Many MVT modules are simple in functionality and easy to understand.
   - Look for `TODO:` or `FIXME:` comments in the code for areas that need attention.

2. **Check GitHub issues:**
   - Look for issues tagged with ["help wanted"](https://github.com/mvt-project/mvt/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22) or ["good first issue"](https://github.com/mvt-project/mvt/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) to find tasks that are beginner-friendly or where input from the community would be helpful.

3. **Ask for guidance:**

   - If you're unsure where to start, feel free to open a [discussion](https://github.com/mvt-project/mvt/discussions) or comment on an issue.

## How to contribute:

1. **Report issues:**

   - Found a bug? Please check existing issues to see if it's already reported. If not, open a new issue. Mobile operating systems and databases are constantly evolving, and new errors may appear spontaneously in new app versions.

   **Please provide as much information as possible about the problem, including: any error messages, steps to reproduce the problem, and any logs or screenshots that can help.**


2. **Suggest features:**
   - If you have an idea for new functionality, create a feature request issue and describe your proposal.

3. **Submit code:**
   - Fork the repository and create a new branch for your changes.
   - Ensure your changes align with the code style guidelines (see below).
   - Open a pull request (PR) with a clear description of your changes and link it to any relevant issues.

4. **Documentation contributions:**
   - Improving documentation is just as valuable as contributing code! If you notice gaps or inaccuracies in the documentation, feel free to submit changes or suggest updates.

## Code style
Please follow these code style guidelines for consistency and readability (a short illustrative snippet follows this list):

- **Indentation**: use 4 spaces per indentation level.
- **Quotes**: Use double quotes (`"`) by default. Use single quotes (`'`) for nested strings instead of escaping (`\"`), or when using f-formatting.
- **Maximum line length**:
  - Aim for lines no longer than 80 characters.
  - Exceptions are allowed for long log lines or strings, which may extend up to 100 characters.
  - Wrap lines that exceed 100 characters.

Follow [PEP 8 guidelines](https://peps.python.org/pep-0008/) for indentation and overall Python code style. All MVT code is automatically linted with [Ruff](https://github.com/astral-sh/ruff) before merging.

Please check your code before opening a pull request by running `make ruff`.
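
As a quick illustration of these conventions, here is a short, made-up snippet (not code from MVT; the module and function names are hypothetical):

```python
import logging

log = logging.getLogger(__name__)


def describe_record(record: dict) -> str:
    """Return a short, human-readable summary of a parsed record."""
    # Four-space indentation and double quotes by default.
    if not record:
        log.warning("Received an empty record")
    name = record.get("name", "unknown")
    # Single quotes inside the f-string avoid escaping the outer double quotes.
    return f"record '{name}' contains {len(record)} fields"
```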

## Community and support

We aim to create a supportive and collaborative environment for all contributors. If you run into any challenges, feel free to reach out through the discussions or issues section of the repository.

Your contributions, big or small, help improve MVT and are always appreciated.
Dockerfile (modified)
@@ -1,89 +1,158 @@
FROM ubuntu:20.04
# Base image for building libraries
# ---------------------------------
FROM ubuntu:22.04 as build-base

# Ref. https://github.com/mvt-project/mvt
ARG DEBIAN_FRONTEND=noninteractive

LABEL url="https://mvt.re"
LABEL vcs-url="https://github.com/mvt-project/mvt"
LABEL description="MVT is a forensic tool to look for signs of infection in smartphone devices."

ENV PIP_NO_CACHE_DIR=1

# Fixing major OS dependencies
# ----------------------------
RUN apt update \
&& apt install -y python3 python3-pip libusb-1.0-0-dev \
&& apt install -y wget unzip\
&& DEBIAN_FRONTEND=noninteractive apt-get -y install default-jre-headless \

# Install build tools for libimobiledevice
# ----------------------------------------
# Install build tools and dependencies
RUN apt-get update \
&& apt-get install -y \
build-essential \
checkinstall \
git \
autoconf \
automake \
libtool-bin \
libplist-dev \
libusbmuxd-dev \
libssl-dev \
sqlite3 \
pkg-config \
libcurl4-openssl-dev \
libusb-1.0-0-dev \
libssl-dev \
udev \
&& rm -rf /var/lib/apt/lists/*

# Clean up

# libplist
# --------
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /var/cache/apt
FROM build-base as build-libplist

# Build
RUN git clone https://github.com/libimobiledevice/libplist && cd libplist \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libplist


# Build libimobiledevice
# ----------------------
RUN git clone https://github.com/libimobiledevice/libplist \
&& git clone https://github.com/libimobiledevice/libimobiledevice-glue \
&& git clone https://github.com/libimobiledevice/libusbmuxd \
&& git clone https://github.com/libimobiledevice/libimobiledevice \
&& git clone https://github.com/libimobiledevice/usbmuxd \
# libimobiledevice-glue
# ---------------------
FROM build-base as build-libimobiledevice-glue

&& cd libplist && ./autogen.sh && make && make install && ldconfig \
# Install dependencies
COPY --from=build-libplist /build /

&& cd ../libimobiledevice-glue && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh --prefix=/usr && make && make install && ldconfig \
# Build
RUN git clone https://github.com/libimobiledevice/libimobiledevice-glue && cd libimobiledevice-glue \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libimobiledevice-glue

&& cd ../libusbmuxd && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh && make && make install && ldconfig \

&& cd ../libimobiledevice && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh --enable-debug && make && make install && ldconfig \
# libtatsu
# --------
FROM build-base as build-libtatsu

&& cd ../usbmuxd && PKG_CONFIG_PATH=/usr/local/lib/pkgconfig ./autogen.sh --prefix=/usr --sysconfdir=/etc --localstatedir=/var --runstatedir=/run && make && make install \
# Install dependencies
COPY --from=build-libplist /build /

# Clean up.
&& cd .. && rm -rf libplist libimobiledevice-glue libusbmuxd libimobiledevice usbmuxd
# Build
RUN git clone https://github.com/libimobiledevice/libtatsu && cd libtatsu \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libtatsu

# Installing MVT
# --------------
RUN pip3 install mvt

# libusbmuxd
# ----------
FROM build-base as build-libusbmuxd

# Install dependencies
COPY --from=build-libplist /build /
COPY --from=build-libimobiledevice-glue /build /

# Build
RUN git clone https://github.com/libimobiledevice/libusbmuxd && cd libusbmuxd \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libusbmuxd


# libimobiledevice
# ----------------
FROM build-base as build-libimobiledevice

# Install dependencies
COPY --from=build-libplist /build /
COPY --from=build-libtatsu /build /
COPY --from=build-libimobiledevice-glue /build /
COPY --from=build-libusbmuxd /build /

# Build
RUN git clone https://github.com/libimobiledevice/libimobiledevice && cd libimobiledevice \
&& ./autogen.sh --enable-debug && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libimobiledevice


# usbmuxd
# -------
FROM build-base as build-usbmuxd

# Install dependencies
COPY --from=build-libplist /build /
COPY --from=build-libimobiledevice-glue /build /
COPY --from=build-libusbmuxd /build /
COPY --from=build-libimobiledevice /build /

# Build
RUN git clone https://github.com/libimobiledevice/usbmuxd && cd usbmuxd \
&& ./autogen.sh --sysconfdir=/etc --localstatedir=/var --runstatedir=/run && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf usbmuxd && mv /build/lib /build/usr/lib


# Create main image
FROM ubuntu:24.04 as main

LABEL org.opencontainers.image.url="https://mvt.re"
LABEL org.opencontainers.image.documentation="https://docs.mvt.re"
LABEL org.opencontainers.image.source="https://github.com/mvt-project/mvt"
LABEL org.opencontainers.image.title="Mobile Verification Toolkit"
LABEL org.opencontainers.image.description="MVT is a forensic tool to look for signs of infection in smartphone devices."
LABEL org.opencontainers.image.licenses="MVT License 1.1"
LABEL org.opencontainers.image.base.name=docker.io/library/ubuntu:22.04

# Install runtime dependencies
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
&& apt-get install -y \
adb \
default-jre-headless \
libcurl4 \
libssl3 \
libusb-1.0-0 \
python3 \
sqlite3
COPY --from=build-libplist /build /
COPY --from=build-libimobiledevice-glue /build /
COPY --from=build-libtatsu /build /
COPY --from=build-libusbmuxd /build /
COPY --from=build-libimobiledevice /build /
COPY --from=build-usbmuxd /build /

# Install mvt using the locally checked out source
COPY . mvt/
RUN apt-get update \
&& apt-get install -y git python3-pip \
&& PIP_NO_CACHE_DIR=1 pip3 install --break-system-packages ./mvt \
&& apt-get remove -y python3-pip git && apt-get autoremove -y \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf mvt

# Installing ABE
# --------------
RUN mkdir /opt/abe \
&& wget https://github.com/nelenkov/android-backup-extractor/releases/download/20210709062403-4c55371/abe.jar -O /opt/abe/abe.jar \
ADD --checksum=sha256:a20e07f8b2ea47620aff0267f230c3f1f495f097081fd709eec51cf2a2e11632 \
https://github.com/nelenkov/android-backup-extractor/releases/download/master-20221109063121-8fdfc5e/abe.jar /opt/abe/abe.jar
# Create alias for abe
&& echo 'alias abe="java -jar /opt/abe/abe.jar"' >> ~/.bashrc
RUN echo 'alias abe="java -jar /opt/abe/abe.jar"' >> ~/.bashrc

# Install Android Platform Tools
# ------------------------------

RUN mkdir /opt/android \
&& wget -q https://dl.google.com/android/repository/platform-tools-latest-linux.zip \
&& unzip platform-tools-latest-linux.zip -d /opt/android \
# Create alias for adb
&& echo 'alias adb="/opt/android/platform-tools/adb"' >> ~/.bashrc

# Generate adb key folder
# ------------------------------
RUN mkdir /root/.android && /opt/android/platform-tools/adb keygen /root/.android/adbkey
# Generate adb key folder
RUN echo 'if [ ! -f /root/.android/adbkey ]; then adb keygen /root/.android/adbkey 2&>1 > /dev/null; fi' >> ~/.bashrc
RUN mkdir /root/.android

# Setup investigations environment
# --------------------------------
RUN mkdir /home/cases
WORKDIR /home/cases
WORKDIR /home/cases
RUN echo 'echo "Mobile Verification Toolkit @ Docker\n------------------------------------\n\nYou can find information about how to use this image for Android (https://github.com/mvt-project/mvt/tree/master/docs/android) and iOS (https://github.com/mvt-project/mvt/tree/master/docs/ios) in the official docs of the project.\n"' >> ~/.bashrc \
&& echo 'echo "Note that to perform the debug via USB you might need to give the Docker image access to the USB using \"docker run -it --privileged -v /dev/bus/usb:/dev/bus/usb mvt\" or, preferably, the \"--device=\" parameter.\n"' >> ~/.bashrc
Dockerfile.android (new file)
@@ -0,0 +1,36 @@
# Create main image
FROM python:3.10.14-alpine3.20 as main

LABEL org.opencontainers.image.url="https://mvt.re"
LABEL org.opencontainers.image.documentation="https://docs.mvt.re"
LABEL org.opencontainers.image.source="https://github.com/mvt-project/mvt"
LABEL org.opencontainers.image.title="Mobile Verification Toolkit (Android)"
LABEL org.opencontainers.image.description="MVT is a forensic tool to look for signs of infection in smartphone devices."
LABEL org.opencontainers.image.licenses="MVT License 1.1"
LABEL org.opencontainers.image.base.name=docker.io/library/python:3.10.14-alpine3.20

# Install runtime dependencies
RUN apk add --no-cache \
android-tools \
git \
libusb \
openjdk11-jre-headless \
sqlite

# Install mvt
COPY ./ mvt
RUN apk add --no-cache --virtual .build-deps gcc musl-dev \
&& PIP_NO_CACHE_DIR=1 pip3 install ./mvt \
&& apk del .build-deps gcc musl-dev && rm -rf ./mvt

# Installing ABE
ADD --checksum=sha256:a20e07f8b2ea47620aff0267f230c3f1f495f097081fd709eec51cf2a2e11632 \
https://github.com/nelenkov/android-backup-extractor/releases/download/master-20221109063121-8fdfc5e/abe.jar /opt/abe/abe.jar
# Create alias for abe
RUN echo 'alias abe="java -jar /opt/abe/abe.jar"' >> ~/.bashrc

# Generate adb key folder
RUN echo 'if [ ! -f /root/.android/adbkey ]; then adb keygen /root/.android/adbkey 2&>1 > /dev/null; fi' >> ~/.bashrc
RUN mkdir /root/.android

ENTRYPOINT [ "/usr/local/bin/mvt-android" ]
Dockerfile.ios (new file)
@@ -0,0 +1,137 @@
# Base image for building libraries
# ---------------------------------
FROM ubuntu:22.04 as build-base

ARG DEBIAN_FRONTEND=noninteractive

# Install build tools and dependencies
RUN apt-get update \
&& apt-get install -y \
build-essential \
git \
autoconf \
automake \
libtool-bin \
pkg-config \
libcurl4-openssl-dev \
libusb-1.0-0-dev \
libssl-dev \
udev \
&& rm -rf /var/lib/apt/lists/*


# libplist
# --------
FROM build-base as build-libplist

# Build
RUN git clone https://github.com/libimobiledevice/libplist && cd libplist \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libplist


# libimobiledevice-glue
# ---------------------
FROM build-base as build-libimobiledevice-glue

# Install dependencies
COPY --from=build-libplist /build /

# Build
RUN git clone https://github.com/libimobiledevice/libimobiledevice-glue && cd libimobiledevice-glue \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libimobiledevice-glue


# libtatsu
# --------
FROM build-base as build-libtatsu

# Install dependencies
COPY --from=build-libplist /build /

# Build
RUN git clone https://github.com/libimobiledevice/libtatsu && cd libtatsu \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libtatsu


# libusbmuxd
# ----------
FROM build-base as build-libusbmuxd

# Install dependencies
COPY --from=build-libplist /build /
COPY --from=build-libimobiledevice-glue /build /

# Build
RUN git clone https://github.com/libimobiledevice/libusbmuxd && cd libusbmuxd \
&& ./autogen.sh && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libusbmuxd


# libimobiledevice
# ----------------
FROM build-base as build-libimobiledevice

# Install dependencies
COPY --from=build-libplist /build /
COPY --from=build-libtatsu /build /
COPY --from=build-libimobiledevice-glue /build /
COPY --from=build-libusbmuxd /build /

# Build
RUN git clone https://github.com/libimobiledevice/libimobiledevice && cd libimobiledevice \
&& ./autogen.sh --enable-debug && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf libimobiledevice


# usbmuxd
# -------
FROM build-base as build-usbmuxd

# Install dependencies
COPY --from=build-libplist /build /
COPY --from=build-libimobiledevice-glue /build /
COPY --from=build-libusbmuxd /build /
COPY --from=build-libimobiledevice /build /

# Build
RUN git clone https://github.com/libimobiledevice/usbmuxd && cd usbmuxd \
&& ./autogen.sh --sysconfdir=/etc --localstatedir=/var --runstatedir=/run && make -j "$(nproc)" && make install DESTDIR=/build \
&& cd .. && rm -rf usbmuxd && mv /build/lib /build/usr/lib


# Main image
# ----------
FROM python:3.10.14-alpine3.20 as main

LABEL org.opencontainers.image.url="https://mvt.re"
LABEL org.opencontainers.image.documentation="https://docs.mvt.re"
LABEL org.opencontainers.image.source="https://github.com/mvt-project/mvt"
LABEL org.opencontainers.image.title="Mobile Verification Toolkit (iOS)"
LABEL org.opencontainers.image.description="MVT is a forensic tool to look for signs of infection in smartphone devices."
LABEL org.opencontainers.image.licenses="MVT License 1.1"
LABEL org.opencontainers.image.base.name=docker.io/library/python:3.10.14-alpine3.20

# Install runtime dependencies
RUN apk add --no-cache \
gcompat \
libcurl \
libssl3 \
libusb \
sqlite
COPY --from=build-libplist /build /
COPY --from=build-libimobiledevice-glue /build /
COPY --from=build-libtatsu /build /
COPY --from=build-libusbmuxd /build /
COPY --from=build-libimobiledevice /build /
COPY --from=build-usbmuxd /build /

# Install mvt using the locally checked out source
COPY ./ mvt
RUN apk add --no-cache --virtual .build-deps git gcc musl-dev \
&& PIP_NO_CACHE_DIR=1 pip3 install ./mvt \
&& apk del .build-deps git gcc musl-dev && rm -rf ./mvt

ENTRYPOINT [ "/usr/local/bin/mvt-ios" ]
Makefile (modified)
@@ -1,10 +1,39 @@
PWD = $(shell pwd)

check: ruff mypy

ruff:
	ruff check .

mypy:
	mypy

test:
	python3 -m pytest

test-ci:
	python3 -m pytest -v

install:
	python3 -m pip install --upgrade -e .

test-requirements:
	python3 -m pip install --upgrade --group dev

generate-proto-parsers:
	# Generate python parsers for protobuf files
	PROTO_FILES=$$(find src/mvt/android/parsers/proto/ -iname "*.proto"); \
	protoc -Isrc/mvt/android/parsers/proto/ --python_betterproto_out=src/mvt/android/parsers/proto/ $$PROTO_FILES

clean:
	rm -rf $(PWD)/build $(PWD)/dist $(PWD)/mvt.egg-info
	rm -rf $(PWD)/build $(PWD)/dist $(PWD)/src/mvt.egg-info

dist:
	python3 setup.py sdist bdist_wheel
	python3 -m pip install --upgrade build
	python3 -m build

upload:
	python3 -m twine upload dist/*

test-upload:
	python3 -m twine upload --repository testpypi dist/*
README.md (modified)
@@ -1,36 +1,51 @@
<p align="center">
<img src="./docs/mvt.png" width="200" />
<img src="https://docs.mvt.re/en/latest/mvt.png" width="200" />
</p>

# Mobile Verification Toolkit

[](https://pypi.org/project/mvt/)
[](https://docs.mvt.re/en/latest/?badge=latest)
[](https://github.com/mvt-project/mvt/actions/workflows/python-package.yml)
[](https://github.com/mvt-project/mvt/actions/workflows/tests.yml)
[](https://pepy.tech/project/mvt)

Mobile Verification Toolkit (MVT) is a collection of utilities to simplify and automate the process of gathering forensic traces helpful to identify a potential compromise of Android and iOS devices.

It has been developed and released by the [Amnesty International Security Lab](https://www.amnesty.org/en/tech/) in July 2021 in the context of the [Pegasus project](https://forbiddenstories.org/about-the-pegasus-project/) along with [a technical forensic methodology and forensic evidence](https://www.amnesty.org/en/latest/research/2021/07/forensic-methodology-report-how-to-catch-nso-groups-pegasus/).
It has been developed and released by the [Amnesty International Security Lab](https://securitylab.amnesty.org) in July 2021 in the context of the [Pegasus Project](https://forbiddenstories.org/about-the-pegasus-project/) along with [a technical forensic methodology](https://www.amnesty.org/en/latest/research/2021/07/forensic-methodology-report-how-to-catch-nso-groups-pegasus/). It continues to be maintained by Amnesty International and other contributors.

*Warning*: MVT is a forensic research tool intended for technologists and investigators. Using it requires understanding the basics of forensic analysis and using command-line tools. This is not intended for end-user self-assessment. If you are concerned with the security of your device please seek expert assistance.
> **Note**
> MVT is a forensic research tool intended for technologists and investigators. It requires understanding digital forensics and using command-line tools. This is not intended for end-user self-assessment. If you are concerned with the security of your device please seek reputable expert assistance.
>

### Indicators of Compromise

MVT supports using public [indicators of compromise (IOCs)](https://github.com/mvt-project/mvt-indicators) to scan mobile devices for potential traces of targeting or infection by known spyware campaigns. This includes IOCs published by [Amnesty International](https://github.com/AmnestyTech/investigations/) and other research groups.

> **Warning**
> Public indicators of compromise are insufficient to determine that a device is "clean", and not targeted with a particular spyware tool. Reliance on public indicators alone can miss recent forensic traces and give a false sense of security.
>
> Reliable and comprehensive digital forensic support and triage requires access to non-public indicators, research and threat intelligence.
>
> Such support is available to civil society through [Amnesty International's Security Lab](https://securitylab.amnesty.org/get-help/?c=mvt_docs) or through our forensic partnership with [Access Now’s Digital Security Helpline](https://www.accessnow.org/help/).

More information about using indicators of compromise with MVT is available in the [documentation](https://docs.mvt.re/en/latest/iocs/).
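
As an illustration of what these published IOC files contain, the sketch below (not part of the README, and not how MVT loads indicators internally) reads a STIX2 bundle with Python's standard library and prints the indicator patterns; the local file name is hypothetical.

```python
import json

# Hypothetical local copy of a published STIX2 indicator bundle.
with open("pegasus.stix2", encoding="utf-8") as handle:
    bundle = json.load(handle)

# A STIX2 bundle is a JSON object with an "objects" list; indicator objects
# carry a "pattern" string such as [domain-name:value = 'example.com'].
for obj in bundle.get("objects", []):
    if obj.get("type") == "indicator":
        print(obj.get("name"), "->", obj.get("pattern"))
```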

## Installation

MVT can be installed from sources or from [PyPi](https://pypi.org/project/mvt/) (you will need some dependencies, check the [documentation](https://docs.mvt.re/en/latest/install/)):
MVT can be installed from sources or from [PyPI](https://pypi.org/project/mvt/) (you will need some dependencies, check the [documentation](https://docs.mvt.re/en/latest/install/)):

```
pip3 install mvt
```

Alternatively, you can decide to run MVT and all relevant tools through a [Docker container](https://docs.mvt.re/en/latest/docker/).
For alternative installation options and known issues, please refer to the [documentation](https://docs.mvt.re/en/latest/install/) as well as [GitHub Issues](https://github.com/mvt-project/mvt/issues).

**Please note:** MVT is best run on Linux or Mac systems. [It does not currently support running natively on Windows.](https://docs.mvt.re/en/latest/install/#mvt-on-windows)

## Usage

MVT provides two commands `mvt-ios` and `mvt-android`. [Check out the documentation to learn how to use them!](https://docs.mvt.re/)


## License

The purpose of MVT is to facilitate the ***consensual forensic analysis*** of devices of those who might be targets of sophisticated mobile spyware attacks, especially members of civil society and marginalized communities. We do not want MVT to enable privacy violations of non-consenting individuals. In order to achieve this, MVT is released under its own license. [Read more here.](https://docs.mvt.re/en/latest/license/)
62
SECURITY.md
Normal file
62
SECURITY.md
Normal file
@@ -0,0 +1,62 @@
|
||||
# Reporting security issues
|
||||
|
||||
Thank you for your interest in reporting security issues and vulnerabilities! Security research is of utmost importance and we take all reports seriously. If you discover an issue please report it to us right away!
|
||||
|
||||
Please DO NOT file a public issue, instead send your report privately to the MVT maintainers at Amnesty International via `security [at] amnesty [dot] tech`.
|
||||
|
||||
You can also write PGP-encrypted emails to key `CFBF9698DCA8EB2A80F48ADEA035A030FA04ED13`. The corresponding PGP public key is lited below.
|
||||
|
||||
```
|
||||
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||
|
||||
mQINBGlFPwsBEADQ+d7SeHrFPYv3wPOjWs2oMpp0DPdfIyGbg+iYWOC36FegZhKY
|
||||
+WeK96GqJWt8wD6kwFUVwQI795WZrjSd1q4a7wR+kj/h7xlRB6ZfVICA6O5DOOm6
|
||||
GNMvqy7ESm8g1XZDpb2u1BXmSS9X8f6rjB0e86kYsF1mB5/2USTM63jgDs0GGTkZ
|
||||
Q1z4Mq4gYyqH32b3gvXkbb68LeQmONUIM3cgmec9q8/pNc1l7fcoLWhOVADRj17Q
|
||||
plisa/EUf/SYqdtk9w7EHGggNenKNwVM235mkPcMqmE72bTpjT6XCxvZY3ByG5yi
|
||||
7L+tHJU45ZuXtt62EvX03azxThVfSmH/WbRk8lH8+CW8XMmiWZphG4ydPWqgVKCB
|
||||
2UOXm+6CQnKA+7Dt1AeK2t5ciATrv9LvwgSxk5WKc3288XFLA6eGMrTdQygYlLjJ
|
||||
+42RSdK/7fCt/qk4q13oUw8ZTVcCia98uZFi704XuuYTH6NrntIB7j/0oucIS4Y9
|
||||
cTWNO5LBerez4v8VI4YHcYESPeIWGFkXhvJzo0VMg1zidBLtiPoGF2JKZGwaK7/p
|
||||
yY1xALskLp4H+5OY4eB1kf8kl4vGsEK8xA/NNzOiapVmwBXpvVvmXIQJE2k+olNf
|
||||
sAuyB8+aO1Ws7tFYt3D+olC7iaprOdK7uA4GCgmYYhq6QQPg+cxfczgHfwARAQAB
|
||||
tD1TZWN1cml0eSBMYWIgYXQgQW1uZXN0eSBJbnRlcm5hdGlvbmFsIDxzZWN1cml0
|
||||
eUBhbW5lc3R5LnRlY2g+iQJRBBMBCAA7FiEEz7+WmNyo6yqA9IreoDWgMPoE7RMF
|
||||
AmlFPwsCGwMFCwkIBwICIgIGFQoJCAsCBBYCAwECHgcCF4AACgkQoDWgMPoE7RNr
|
||||
2w//a88uP90uSN6lgeIwKsHr1ri27QIBbzCV6hLN/gZBFR2uaiOn/xfFDbnR0Cjo
|
||||
5nMCJCT1k4nrPbMTlfmWLCD+YKELBzVqWlw4J2SOg3nznPl2JrL8QBKjwts0sF+h
|
||||
QbRWDsT54wBZnl6ZJJ79eLShNTokBbKnQ7071dMrENr5e2P2sClQXyiIc51ga4FM
|
||||
fHyhsx+GsrdiZNd2AH8912ljW1GuEi3epTO7KMZprmr37mjpZSUToiV59Yhl1Gbo
|
||||
2pixkYJqi62DG02/gTpCjq9NH3cEMxcxjh4E7yCA8ggLG6+IN6woIvPIdOsnQ+Yj
|
||||
d3H4rMNBjPSKoL+bdHILkCnp5HokcbVjNY3QAyOAF4qWhk4GtgpTshwxUmb4Tbay
|
||||
tWLJC2bzjuUBxLkGzMVFfU3B96sVS4Fi0sBaEMBtHskl2f45X8LJhSq//Lw/2L/8
|
||||
34uP/RxDSn+DPvj/yqMpekdCcmeFSTX1A19xkPcc0rVhMRde4VL338R86vzh0gMI
|
||||
1LySDAhXZyVWzrQ5s3n6N3EvCaHCn3qu7ieyFJifCSR7gZqevCEznMQRVpkMTzUt
|
||||
rk13Z6NOOb4IlTW7HFoY3omJG8Z5jV4kMIE7n6nb0qpNYQiG+YvjenQ3VrMoISyh
|
||||
lpS2De8+oOtwrxBVX3+qKWvQqzufeE3416kw2Z+5mxH7bx25Ag0EaUU/CwEQALyZ
|
||||
b+kwLN1yHObTm2yDBEn5HbCT3H1GremvPNmbAaTnfrjUngoKa8MuWWzbX5ptgmZR
|
||||
UpYY/ylOYcgGydz58vUNrPlhIZT9UhmiifPgZLEXyd0uFpr/NsbRajHMkK10iEZf
|
||||
h5bHNobiB7pGCu4Uj9e1cMiIZ4yEaYeyXYUoNHf6ISP39mJhHy6ov5yIpm9q0wzm
|
||||
tGUQPupxGXmEZlOPr3lxqXQ3Ekdv6cWDY5r/oOq71QJ/HUQ13QUuGFIbhnMbT8zd
|
||||
zaS6f/v772YKsWPc4NNUhtlf25VnQ4FuUtjCe3p6iYP4OVD8gJm0GvXyvyTuiQbL
|
||||
CSk/378JiNT7nZzYXxrWchMwvEoMIU55+/UaBc50HI5xvDQ858CX7PYGiimcdsO1
|
||||
EkQzhVxRfjlILfWrC2lgt+H5qhTn4Fah250Xe1PnLjXGHVUQnY/f3MFeiWQgf92b
|
||||
02+MfvOeC5OKttP1z5lcx6RFWCIa1E/u8Nj7YrH9hk0ZBRAnBaeAncDFY8dfX2zX
|
||||
VMoc0dV16gM7RrZ6i7D3CG3eLLkQlX0jbW9dzTuG/3f098EWB1p8vOfS/RbNCBRX
|
||||
jqGiqacL/aFF3Ci3nQ4O5tSv1XipbgrUhvXnwm9pxrLPS/45iaO59WN4RRGWLLQ7
|
||||
LHmeBxoa9avv0SdBYUL+eBxY46GXb/j5VLzHYhSnABEBAAGJAjYEGAEIACAWIQTP
|
||||
v5aY3KjrKoD0it6gNaAw+gTtEwUCaUU/CwIbDAAKCRCgNaAw+gTtEyvsEACnyFFD
|
||||
alOZTrrJTXNnUejuiExLh+qTO3T91p5bte597jpwCZnYGwkxEfffsqqhlY6ftEOf
|
||||
d5tNWE5isai4v8XCbplWomz4KBpepxcn2b+9o5dSyr1vohEFuCJziZDsta1J2DX5
|
||||
IE9U48kTgLDfdIBhuOyHNRkvXRHP2OVLCaiw4d9q+hlrraR8pehHt2BJSxh+QZoe
|
||||
n0iHvIZCBIUA45zLEGmXFpNTGeEf2dKPp3xOkAXOhAMPptE0V1itkF3R7kEW4aFO
|
||||
SZo8L3C1aWSz/gQ4/vvW5t1IJxirNMUgTMQFvqEkAwX3fm6GCxlgRSvTTRXdcrS8
|
||||
6qyFdH1nkCNsavPahN3N2RGGIlWtODEMTO1Hjy0kZtTYdW+JH9sendliCoJES+yN
|
||||
DjM125SgdAgrqlSYm/g8n9knWpxZv1QM6jU/sVz1J+l6/ixugL2i+CAL2d6uv4tT
|
||||
QmXnu7Ei4/2kHBUu3Lf59MNgmLHm6F7AhOWErszSeoJKsp+3yA1oTT/npz67sRzY
|
||||
VVyxz4NBIollna59a1lz0RhlWzNKqNB27jhylyM4ltdzHB7r4VMAVJyttozmIIOC
|
||||
35ucYxl5BHLuapaRSaYHdUId1LOccYyaOOFF/PSyCu9dKzXk7zEz2HNcIboWSkAE
|
||||
8ZDExMYM4WVpVCOj+frdsaBvzItHacRWuijtkw==
|
||||
=JAXX
|
||||
-----END PGP PUBLIC KEY BLOCK-----
|
||||
```
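If you want to encrypt your report before sending it, here is a minimal sketch using GnuPG; `amnesty-security.asc` and `report.txt` are hypothetical placeholder filenames for the saved key block above and for your report.

```bash
# Save the PGP public key block above to a file, then import it (filenames are placeholders)
gpg --import amnesty-security.asc

# Encrypt and ASCII-armor the report to the fingerprint listed above
gpg --encrypt --armor --recipient CFBF9698DCA8EB2A80F48ADEA035A030FA04ED13 report.txt
```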
|
||||
@@ -1,14 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from mvt import android
|
||||
|
||||
android.cli()
|
||||
14
dev/mvt-ios
14
dev/mvt-ios
@@ -1,14 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
|
||||
from mvt import ios
|
||||
|
||||
ios.cli()
|
||||
@@ -16,6 +16,12 @@ Now you can try launching MVT with:
|
||||
mvt-android check-adb --output /path/to/results
|
||||
```
|
||||
|
||||
!!! warning
|
||||
The `check-adb` command is deprecated and will be removed in a future release.
|
||||
Whenever possible, prefer acquiring device data using the AndroidQF project (https://github.com/mvt-project/androidqf/) and then analyzing those acquisitions with MVT.
|
||||
|
||||
Running `mvt-android check-adb` will also emit a runtime deprecation warning advising you to migrate to AndroidQF.
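As a sketch of the preferred workflow, once an acquisition has been collected with AndroidQF you can analyse the resulting folder with the `mvt-android check-androidqf` command (the paths below are placeholders):

```bash
mvt-android check-androidqf --output /path/to/results /path/to/androidqf-acquisition/
```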
|
||||
|
||||
If you have previously started an adb daemon MVT will alert you and require you to kill it with `adb kill-server` and relaunch the command.
|
||||
|
||||
!!! warning
|
||||
@@ -37,6 +43,14 @@ mvt-android check-adb --serial 192.168.1.20:5555 --output /path/to/results
|
||||
|
||||
Where `192.168.1.20` is the correct IP address of your device.
|
||||
|
||||
!!! warning
|
||||
The `check-adb` workflow shown above is deprecated. Whenever possible (recommended), create an acquisition from the device with the AndroidQF project instead, and analyse that acquisition with MVT: https://github.com/mvt-project/androidqf/
|
||||
|
||||
AndroidQF acquisitions provide a more stable, reproducible analysis surface and are the preferred workflow going forward.
|
||||
|
||||
## MVT modules requiring root privileges
|
||||
|
||||
!!! warning
|
||||
Deprecated: many `mvt-android check-adb` workflows are deprecated and will be removed in a future release. Whenever possible, prefer acquiring an AndroidQF acquisition using the AndroidQF project (https://github.com/mvt-project/androidqf/).
|
||||
|
||||
Of the currently available `mvt-android check-adb` modules, a handful require root privileges to function correctly. This is because certain files, such as the browser history and SMS message databases, are not accessible with user privileges through adb. These modules are to be considered OPTIONALLY available in case the device was already jailbroken. **Do NOT jailbreak your own device unless you are sure of what you are doing!** Jailbreaking your phone exposes it to considerable security risks!
|
||||
|
||||
@@ -24,7 +24,7 @@ Some recent phones will enforce the utilisation of a password to encrypt the bac
|
||||
|
||||
## Unpack and check the backup
|
||||
|
||||
MVT includes a partial implementation of the Android Backup parsing, because of the implementation difference in the compression algorithm between Java and Python. The `-nocompress` option passed to adb in the section above allows to avoid this issue. You can analyse and extract SMSs containing links from the backup directly with MVT:
|
||||
MVT includes a partial implementation of Android Backup parsing, because of the difference in the compression algorithm implementations between Java and Python. The `-nocompress` option passed to adb in the section above avoids this issue. You can analyse and extract SMSs from the backup directly with MVT:
|
||||
|
||||
```bash
|
||||
$ mvt-android check-backup --output /path/to/results/ /path/to/backup.ab
|
||||
@@ -32,10 +32,14 @@ $ mvt-android check-backup --output /path/to/results/ /path/to/backup.ab
|
||||
INFO [mvt.android.modules.backup.sms] Running module SMS...
|
||||
INFO [mvt.android.modules.backup.sms] Processing SMS backup file at
|
||||
apps/com.android.providers.telephony/d_f/000000_sms_backup
|
||||
INFO [mvt.android.modules.backup.sms] Extracted a total of 64 SMS messages containing links
|
||||
INFO [mvt.android.modules.backup.sms] Extracted a total of 64 SMS messages
|
||||
```
|
||||
|
||||
If the backup is encrypted, MVT will prompt you to enter the password.
|
||||
If the backup is encrypted, MVT will prompt you to enter the password. A backup password can also be provided with the `--backup-password` command line option or through the `MVT_ANDROID_BACKUP_PASSWORD` environment variable. The same options can also be used when analysing an encrypted backup collected through AndroidQF in the `mvt-android check-androidqf` command:
|
||||
|
||||
```bash
|
||||
$ mvt-android check-backup --backup-password "password123" --output /path/to/results/ /path/to/backup.ab
|
||||
```
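Equivalently, the password can be supplied through the environment variable instead of the command line (same placeholder password and paths as above):

```bash
MVT_ANDROID_BACKUP_PASSWORD="password123" mvt-android check-backup --output /path/to/results/ /path/to/backup.ab
```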
|
||||
|
||||
Through the `--iocs` argument you can specify a [STIX2](https://oasis-open.github.io/cti-documentation/stix/intro) file defining a list of malicious indicators to check against the records extracted from the backup by MVT. Any matches will be highlighted in the terminal output.
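For example, with a placeholder STIX2 file path:

```bash
mvt-android check-backup --iocs /path/to/indicators.stix2 --output /path/to/results/ /path/to/backup.ab
```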
|
||||
|
||||
@@ -52,4 +56,4 @@ If the backup is encrypted, ABE will prompt you to enter the password.
|
||||
|
||||
Alternatively, [ab-decrypt](https://github.com/joernheissler/ab-decrypt) can be used for that purpose.
|
||||
|
||||
You can then extract SMSs containing links with MVT by passing the folder path as parameter instead of the `.ab` file: `mvt-android check-backup --output /path/to/results/ /path/to/backup/`.
|
||||
You can then extract SMSs with MVT by passing the folder path as a parameter instead of the `.ab` file: `mvt-android check-backup --output /path/to/results/ /path/to/backup/` (the backup path given should be the folder containing the `apps` folder).
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Downloading APKs from an Android phone
|
||||
|
||||
MVT allows to attempt to download all available installed packages (APKs) in order to further inspect them and potentially identify any which might be malicious in nature.
|
||||
MVT allows you to attempt to download all available installed packages (APKs) from a device in order to further inspect them and potentially identify any which might be malicious in nature.
|
||||
|
||||
You can do so by launching the following command:
|
||||
|
||||
@@ -13,22 +13,16 @@ It might take several minutes to complete.
|
||||
!!! info
|
||||
MVT will likely warn you it was unable to download certain installed packages. There is no reason to be alarmed: this is typically expected behavior when MVT attempts to download a system package it has no privileges to access.
|
||||
|
||||
Optionally, you can decide to enable lookups of the SHA256 hash of all the extracted APKs on [VirusTotal](https://www.virustotal.com) and/or [Koodous](https://koodous.com). While these lookups do not provide any conclusive assessment on all of the extracted APKs, they might highlight any known malicious ones:
|
||||
Optionally, you can decide to enable lookups of the SHA256 hash of all the extracted APKs on [VirusTotal](https://www.virustotal.com). While these lookups do not provide any conclusive assessment on all of the extracted APKs, they might highlight any known malicious ones:
|
||||
|
||||
```bash
|
||||
mvt-android download-apks --output /path/to/folder --virustotal
|
||||
mvt-android download-apks --output /path/to/folder --koodous
|
||||
MVT_VT_API_KEY=<key> mvt-android download-apks --output /path/to/folder --virustotal
|
||||
```
|
||||
|
||||
Or, to launch all available lookups:
|
||||
Please note that in order to use VirusTotal lookups you are required to provide your own API key through the `MVT_VT_API_KEY` environment variable. You should also note that VirusTotal enforces strict API usage limits. Be mindful that MVT might consume your hourly search quota.
|
||||
|
||||
In case you have a previous extraction of APKs you want to later check against VirusTotal, you can do so with the following arguments:
|
||||
|
||||
```bash
|
||||
mvt-android download-apks --output /path/to/folder --all-checks
|
||||
MVT_VT_API_KEY=<key> mvt-android download-apks --from-file /path/to/folder/apks.json --virustotal
|
||||
```
|
||||
|
||||
In case you have a previous extraction of APKs you want to later check against VirusTotal and Koodous, you can do so with the following arguments:
|
||||
|
||||
```bash
|
||||
mvt-android download-apks --from-file /path/to/folder/apks.json --all-checks
|
||||
```
|
||||
|
||||
|
||||
@@ -8,8 +8,10 @@ However, not all is lost.
|
||||
|
||||
Because malware attacks over Android typically take the form of malicious or backdoored apps, the very first thing you might want to do is to extract and verify all installed Android packages and triage quickly if there are any which stand out as malicious or which might be atypical.
|
||||
|
||||
While it is out of the scope of this documentation to dwell into details on how to analyze Android apps, MVT does allow to easily and automatically extract information about installed apps, download copies of them, and quickly lookup services such as [VirusTotal](https://www.virustotal.com) or [Koodous](https://koodous.com) which might quickly indicate known bad apps.
|
||||
While it is out of the scope of this documentation to go into detail on how to analyze Android apps, MVT does make it easy to automatically extract information about installed apps, download copies of them, and quickly look them up on services such as [VirusTotal](https://www.virustotal.com).
|
||||
|
||||
!!! info "Using VirusTotal"
|
||||
Please note that in order to use VirusTotal lookups you are required to provide your own API key through the `MVT_VT_API_KEY` environment variable. You should also note that VirusTotal enforces strict API usage limits. Be mindful that MVT might consume your hourly search quota.
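For instance, the key can be exported once per shell session before running a lookup (the value below is a placeholder):

```bash
export MVT_VT_API_KEY="<your-api-key>"
mvt-android download-apks --output /path/to/folder --virustotal
```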
|
||||
|
||||
## Check the device over Android Debug Bridge
|
||||
|
||||
|
||||
43
docs/command_completion.md
Normal file
43
docs/command_completion.md
Normal file
@@ -0,0 +1,43 @@
|
||||
# Command Completion
|
||||
|
||||
MVT utilizes the [Click](https://click.palletsprojects.com/en/stable/) library for creating its command line interface.
|
||||
|
||||
Click provides tab completion support for Bash (version 4.4 and up), Zsh, and Fish.
|
||||
|
||||
To enable it, you need to manually register a special function with your shell, which varies depending on the shell you are using.
|
||||
|
||||
The following describes how to generate the command completion scripts and add them to your shell configuration.
|
||||
|
||||
> **Note: You will need to start a new shell for the changes to take effect.**
|
||||
|
||||
### For Bash
|
||||
|
||||
```bash
|
||||
# Generates bash completion scripts
|
||||
echo "$(_MVT_IOS_COMPLETE=bash_source mvt-ios)" > ~/.mvt-ios-complete.bash &&
|
||||
echo "$(_MVT_ANDROID_COMPLETE=bash_source mvt-android)" > ~/.mvt-android-complete.bash
|
||||
```
|
||||
|
||||
Add the following to `~/.bashrc`:
|
||||
```bash
|
||||
# source mvt completion scripts
|
||||
. ~/.mvt-ios-complete.bash && . ~/.mvt-android-complete.bash
|
||||
```
|
||||
|
||||
### For Zsh
|
||||
|
||||
```bash
|
||||
# Generates zsh completion scripts
|
||||
echo "$(_MVT_IOS_COMPLETE=zsh_source mvt-ios)" > ~/.mvt-ios-complete.zsh &&
|
||||
echo "$(_MVT_ANDROID_COMPLETE=zsh_source mvt-android)" > ~/.mvt-android-complete.zsh
|
||||
```
|
||||
|
||||
Add the following to `~/.zshrc`:
|
||||
```bash
|
||||
# source mvt completion scripts
|
||||
. ~/.mvt-ios-complete.zsh && . ~/.mvt-android-complete.zsh
|
||||
```
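### For Fish

A sketch following the same pattern for Fish, assuming Click's `fish_source` completion mode and the standard Fish completions directory; completions placed there are loaded automatically, so no extra sourcing line is needed:

```bash
# Generates fish completion scripts
_MVT_IOS_COMPLETE=fish_source mvt-ios > ~/.config/fish/completions/mvt-ios.fish &&
_MVT_ANDROID_COMPLETE=fish_source mvt-android > ~/.config/fish/completions/mvt-android.fish
```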
|
||||
|
||||
For more information, visit the official [Click Docs](https://click.palletsprojects.com/en/stable/shell-completion/#enabling-completion).
|
||||
|
||||
|
||||
27
docs/development.md
Normal file
27
docs/development.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Development
|
||||
|
||||
The Mobile Verification Toolkit team welcomes contributions of new forensic modules and other changes which help improve the software.
|
||||
|
||||
## Testing
|
||||
|
||||
MVT uses `pytest` for unit and integration tests. Code style consistency is maintained with `flake8`, `ruff` and `black`. All can
|
||||
be run automatically with:
|
||||
|
||||
```bash
|
||||
make check
|
||||
```
|
||||
|
||||
Run these tests before making new commits or opening pull requests.
|
||||
|
||||
## Profiling
|
||||
|
||||
Some MVT modules extract and process significant amounts of data during the analysis process or while checking results against known indicators. Care must be
|
||||
taken to avoid inefficient code paths as we add new modules.
|
||||
|
||||
MVT modules can be profiled with Python's built-in `cProfile` by setting the `MVT_PROFILE` environment variable.
|
||||
|
||||
```bash
|
||||
MVT_PROFILE=1 dev/mvt-ios check-backup test_backup
|
||||
```
|
||||
|
||||
Open an issue or PR if you are encountering significant performance issues when analyzing a device with MVT.
|
||||
@@ -1,8 +1,23 @@
|
||||
Using Docker simplifies having all the required dependencies and tools (including most recent versions of [libimobiledevice](https://libimobiledevice.org)) readily installed.
|
||||
Using Docker simplifies having all the required dependencies and tools (including most recent versions of [libimobiledevice](https://libimobiledevice.org)) readily installed. Note that this requires a Linux host, as Docker for Windows and Mac [doesn't support passing through USB devices](https://docs.docker.com/desktop/faqs/#can-i-pass-through-a-usb-device-to-a-container).
|
||||
|
||||
Install Docker following the [official documentation](https://docs.docker.com/get-docker/).
|
||||
|
||||
Once installed, you can clone MVT's repository and build its Docker image:
|
||||
Once Docker is installed, you can run MVT by downloading a prebuilt MVT Docker image, or by building a Docker image yourself from the MVT source repo.
|
||||
|
||||
### Using the prebuilt Docker image
|
||||
|
||||
```bash
|
||||
docker pull ghcr.io/mvt-project/mvt
|
||||
```
|
||||
|
||||
You can then run the Docker container with:
|
||||
|
||||
```bash
|
||||
docker run -it ghcr.io/mvt-project/mvt
|
||||
```
|
||||
|
||||
|
||||
### Build and run Docker image from source
|
||||
|
||||
```bash
|
||||
git clone https://github.com/mvt-project/mvt.git
|
||||
@@ -10,11 +25,6 @@ cd mvt
|
||||
docker build -t mvt .
|
||||
```
|
||||
|
||||
Optionally, you may need to specify your platform to Docker in order to build successfully (Apple M1)
|
||||
```bash
|
||||
docker build --platform amd64 -t mvt .
|
||||
```
|
||||
|
||||
Test if the image was created successfully:
|
||||
|
||||
```bash
|
||||
@@ -23,6 +33,9 @@ docker run -it mvt
|
||||
|
||||
If a prompt is spawned successfully, you can close it with `exit`.
|
||||
|
||||
|
||||
## Docker usage with Android devices
|
||||
|
||||
If you wish to use MVT to test an Android device you will need to enable the container's access to the host's USB devices. You can do so by enabling the `--privileged` flag and mounting the USB bus device as a volume:
|
||||
|
||||
```bash
|
||||
|
||||
BIN
docs/img/macos-backup2.png
Normal file
BIN
docs/img/macos-backup2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 244 KiB |
BIN
docs/img/macos-backups.png
Normal file
BIN
docs/img/macos-backups.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 249 KiB |
@@ -6,6 +6,9 @@
|
||||
|
||||
Mobile Verification Toolkit (MVT) is a tool to facilitate the [consensual forensic analysis](introduction.md#consensual-forensics) of Android and iOS devices, for the purpose of identifying traces of compromise.
|
||||
|
||||
It has been developed and released by the [Amnesty International Security Lab](https://securitylab.amnesty.org) in July 2021 in the context of the [Pegasus Project](https://forbiddenstories.org/about-the-pegasus-project/) along with [a technical forensic methodology](https://www.amnesty.org/en/latest/research/2021/07/forensic-methodology-report-how-to-catch-nso-groups-pegasus/). It continues to be maintained by Amnesty International and other contributors.
|
||||
|
||||
|
||||
In this documentation you will find instructions on how to install and run the `mvt-ios` and `mvt-android` commands, and guidance on how to interpret the extracted results.
|
||||
|
||||
## Resources
|
||||
|
||||
@@ -7,11 +7,27 @@ Before proceeding, please note that MVT requires Python 3.6+ to run. While it sh
|
||||
First install some basic dependencies that will be necessary to build all required tools:
|
||||
|
||||
```bash
|
||||
sudo apt install python3 python3-pip libusb-1.0-0 sqlite3
|
||||
sudo apt install python3 python3-venv python3-pip sqlite3 libusb-1.0-0
|
||||
```
|
||||
|
||||
*libusb-1.0-0* is not required if you intend to only use `mvt-ios` and not `mvt-android`.
|
||||
|
||||
(Recommended) Set up `pipx`
|
||||
|
||||
For Ubuntu 23.04 or above:
|
||||
```bash
|
||||
sudo apt install pipx
|
||||
pipx ensurepath
|
||||
```
|
||||
|
||||
For Ubuntu 22.04 or below:
|
||||
```bash
|
||||
python3 -m pip install --user pipx
|
||||
python3 -m pipx ensurepath
|
||||
```
|
||||
|
||||
Other distributions: check for a `pipx` or `python-pipx` package via your package manager.
|
||||
|
||||
When working with Android devices you should additionally install [Android SDK Platform Tools](https://developer.android.com/studio/releases/platform-tools). If you prefer to install a package made available by your distribution of choice, please make sure the version is recent to ensure compatibility with modern Android devices.
|
||||
|
||||
## Dependencies on macOS
|
||||
@@ -21,7 +37,7 @@ Running MVT on macOS requires Xcode and [homebrew](https://brew.sh) to be instal
|
||||
In order to install dependencies use:
|
||||
|
||||
```bash
|
||||
brew install python3 libusb sqlite3
|
||||
brew install python3 pipx libusb sqlite3
|
||||
```
|
||||
|
||||
*libusb* is not required if you intend to only use `mvt-ios` and not `mvt-android`.
|
||||
@@ -42,24 +58,47 @@ It is recommended to try installing and running MVT from [Windows Subsystem Linu
|
||||
|
||||
## Installing MVT
|
||||
|
||||
If you haven't done so, you can add this to your `.bashrc` or `.zshrc` file in order to add locally installed Pypi binaries to your `$PATH`:
|
||||
### Installing from PyPI with pipx (recommended)
|
||||
1. Install `pipx` following the instructions above for your OS/distribution. Make sure to run `pipx ensurepath` and open a new terminal window.
|
||||
2. ```bash
|
||||
pipx install mvt
|
||||
```
|
||||
|
||||
You should now have the `mvt-ios` and `mvt-android` utilities installed. If you run into problems with these commands not being found, ensure you have run `pipx ensurepath` and opened a new terminal window.
|
||||
|
||||
### Installing from PyPI directly into a virtual environment
|
||||
You can use `pipenv`, `poetry`, etc. for your virtual environment, but the example provided uses the built-in `venv` tool:
|
||||
|
||||
1. Create the virtual environment in a folder in the current directory named `env`:
|
||||
```bash
|
||||
export PATH=$PATH:~/.local/bin
|
||||
python3 -m venv env
|
||||
```
|
||||
|
||||
Then you can install MVT directly from [pypi](https://pypi.org/project/mvt/)
|
||||
|
||||
2. Activate the virtual environment:
|
||||
```bash
|
||||
pip3 install mvt
|
||||
source env/bin/activate
|
||||
```
|
||||
|
||||
Or from the source code:
|
||||
3. Install `mvt` into the virtual environment:
|
||||
```bash
|
||||
pip install mvt
|
||||
```
|
||||
|
||||
The `mvt-ios` and `mvt-android` utilities should now be available as commands whenever the virtual environment is active.
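A quick way to verify that the entry points resolve inside the active environment (this only prints the command help):

```bash
mvt-ios --help
mvt-android --help
```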
|
||||
|
||||
### Installing from git source with pipx
|
||||
If you want to have the latest features in development, you can install MVT directly from the source code in git.
|
||||
|
||||
```bash
|
||||
git clone https://github.com/mvt-project/mvt.git
|
||||
cd mvt
|
||||
pip3 install .
|
||||
pipx install --force git+https://github.com/mvt-project/mvt.git
|
||||
```
|
||||
|
||||
You should now have the `mvt-ios` and `mvt-android` utilities installed.
|
||||
|
||||
**Notes:**
|
||||
1. The `--force` flag is necessary to force the reinstallation of the package.
|
||||
2. To revert to using a PyPI version, it will be necessary to `pipx uninstall mvt` first.
|
||||
|
||||
## Setting up command completions
|
||||
|
||||
See ["Command completions"](command_completion.md)
|
||||
|
||||
@@ -12,6 +12,20 @@ Mobile Verification Toolkit (MVT) is a collection of utilities designed to facil
|
||||
|
||||
MVT is a forensic research tool intended for technologists and investigators. Using it requires understanding the basics of forensic analysis and using command-line tools. MVT is not intended for end-user self-assessment. If you are concerned with the security of your device please seek expert assistance.
|
||||
|
||||
## Indicators of Compromise
|
||||
|
||||
MVT supports using [indicators of compromise (IOCs)](https://github.com/mvt-project/mvt-indicators) to scan mobile devices for potential traces of targeting or infection by known spyware campaigns. This includes IOCs published by [Amnesty International](https://github.com/AmnestyTech/investigations/) and other research groups.
|
||||
|
||||
!!! warning
|
||||
Public indicators of compromise are insufficient to determine that a device is "clean", and not targeted with a particular spyware tool. Reliance on public indicators alone can miss recent forensic traces and give a false sense of security.
|
||||
|
||||
Reliable and comprehensive digital forensic support and triage requires access to non-public indicators, research and threat intelligence.
|
||||
|
||||
Such support is available to civil society through [Amnesty International's Security Lab](https://securitylab.amnesty.org/get-help/?c=mvt_docs) or [Access Now’s Digital Security Helpline](https://www.accessnow.org/help/).
|
||||
|
||||
More information about using indicators of compromise with MVT is available in the [documentation](iocs.md).
|
||||
|
||||
|
||||
## Consensual Forensics
|
||||
|
||||
While MVT is capable of extracting and processing various types of very personal records typically found on a mobile phone (such as calls history, SMS and WhatsApp messages, etc.), this is intended to help identify potential attack vectors such as malicious SMS messages leading to exploitation.
|
||||
|
||||
16
docs/iocs.md
16
docs/iocs.md
@@ -34,13 +34,25 @@ It is also possible to load STIX2 files automatically from the environment varia
|
||||
export MVT_STIX2="/home/user/IOC1.stix2:/home/user/IOC2.stix2"
|
||||
```
|
||||
|
||||
## STIX2 Support
|
||||
|
||||
So far MVT implements only a subset of [STIX2 specifications](https://docs.oasis-open.org/cti/stix/v2.1/csprd01/stix-v2.1-csprd01.html):
|
||||
|
||||
* It only supports checks for one value (such as `[domain-name:value='DOMAIN']`) and not boolean expressions over multiple comparisons
|
||||
* It only supports the following types: `domain-name:value`, `process:name`, `email-addr:value`, `file:name`, `file:path`, `file:hashes.md5`, `file:hashes.sha1`, `file:hashes.sha256`, `app:id`, `configuration-profile:id`, `android-property:name`, `url:value` (but each type will only be checked by a module if it is relevant to the type of data obtained)
|
||||
|
||||
## Known repositories of STIX2 IOCs
|
||||
|
||||
- The [Amnesty International investigations repository](https://github.com/AmnestyTech/investigations) contains STIX-formatted IOCs for:
|
||||
- [Pegasus](https://en.wikipedia.org/wiki/Pegasus_(spyware)) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-07-18_nso/pegasus.stix2))
|
||||
- [Predator from Cytrox](https://citizenlab.ca/2021/12/pegasus-vs-predator-dissidents-doubly-infected-iphone-reveals-cytrox-mercenary-spyware/) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-12-16_cytrox/cytrox.stix2))
|
||||
- [This repository](https://github.com/Te-k/stalkerware-indicators) contains IOCs for Android stalkerware including [a STIX MVT-compatible file](https://raw.githubusercontent.com/Te-k/stalkerware-indicators/master/stalkerware.stix2).
|
||||
- [An Android Spyware Campaign Linked to a Mercenary Company](https://github.com/AmnestyTech/investigations/tree/master/2023-03-29_android_campaign) ([STIX2](https://github.com/AmnestyTech/investigations/blob/master/2023-03-29_android_campaign/malware.stix2))
|
||||
- [This repository](https://github.com/Te-k/stalkerware-indicators) contains IOCs for Android stalkerware including [a STIX MVT-compatible file](https://raw.githubusercontent.com/Te-k/stalkerware-indicators/master/generated/stalkerware.stix2).
|
||||
- We are also maintaining [a list of IOCs](https://github.com/mvt-project/mvt-indicators) in STIX format from public spyware campaigns.
|
||||
|
||||
You can automaticallly download the latest public indicator files with the command `mvt-ios download-iocs` or `mvt-android download-iocs`. These commands download the list of indicators listed [here](https://github.com/mvt-project/mvt/blob/main/public_indicators.json) and store them in the [appdir](https://pypi.org/project/appdirs/) folder. They are then loaded automatically by mvt.
|
||||
You can automatically download the latest public indicator files with the command `mvt-ios download-iocs` or `mvt-android download-iocs`. These commands download the list of indicators from the [mvt-indicators](https://github.com/mvt-project/mvt-indicators/blob/main/indicators.yaml) repository and store them in the [appdir](https://pypi.org/project/appdirs/) folder. They are then loaded automatically by MVT.
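For example, to refresh the public indicators before an analysis:

```bash
mvt-ios download-iocs
mvt-android download-iocs
```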
|
||||
|
||||
Please [open an issue](https://github.com/mvt-project/mvt/issues/) to suggest new sources of STIX-formatted IOCs.
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,16 +1,41 @@
|
||||
# Backup with iTunes app
|
||||
|
||||
It is possible to do an iPhone backup by using iTunes on Windows or macOS computers (in most recent versions of macOS, this feature is included in Finder).
|
||||
It is possible to do an iPhone backup by using iTunes on Windows or macOS computers (in most recent versions of macOS, this feature is included in Finder, see below).
|
||||
|
||||
To do that:
|
||||
|
||||
* Make sure iTunes is installed.
|
||||
* Connect your iPhone to your computer using a Lightning/USB cable.
|
||||
* Open the device in iTunes (or Finder on macOS).
|
||||
* If you want to have a more accurate detection, ensure that the encrypted backup option is activated and choose a secure password for the backup.
|
||||
* Start the backup and wait for it to finish (this may take up to 30 minutes).
|
||||
1. Make sure iTunes is installed.
|
||||
2. Connect your iPhone to your computer using a Lightning/USB cable.
|
||||
3. Open the device in iTunes (or Finder on macOS).
|
||||
4. If you want to have a more accurate detection, ensure that the encrypted backup option is activated and choose a secure password for the backup.
|
||||
5. Start the backup and wait for it to finish (this may take up to 30 minutes).
|
||||
|
||||

|
||||

|
||||
_Source: [Apple Support](https://support.apple.com/en-us/HT211229)_
|
||||
|
||||
* Once the backup is done, find its location and copy it to a place where it can be analyzed by MVT. On Windows, the backup can be stored either in `%USERPROFILE%\Apple\MobileSync\` or `%USERPROFILE%\AppData\Roaming\Apple Computer\MobileSync\`. On macOS, the backup is stored in `~/Library/Application Support/MobileSync/`.
|
||||
Once the backup is done, find its location and copy it to a place where it can be analyzed by MVT. On Windows, the backup can be stored either in `%USERPROFILE%\Apple\MobileSync\` or `%USERPROFILE%\AppData\Roaming\Apple Computer\MobileSync\`. On macOS, the backup is stored in `~/Library/Application Support/MobileSync/`.
|
||||
|
||||
# Backup with Finder
|
||||
|
||||
On more recent macOS versions, this feature is included in Finder. To make a backup:
|
||||
|
||||
1. Launch Finder on your Mac.
|
||||
2. Connect your iPhone to your Mac using a Lightning/USB cable.
|
||||
3. Select your device from the list of devices located at the bottom of the left side bar labeled "locations".
|
||||
4. In the General tab, select `Back up all the data on your iPhone to this Mac` from the options under the Backups section.
|
||||
5. Check the box that says `Encrypt local backup`. If it is your first time selecting this option, you may need to enter a password to encrypt the backup.
|
||||
|
||||

|
||||
_Source: [Apple Support](https://support.apple.com/en-us/HT211229)_
|
||||
|
||||
6. Click `Back Up Now` to start the back-up process.
|
||||
7. The encrypted backup of your iPhone should now start. Once the process finishes, you can check the backup by opening `Finder`, clicking on the `General` tab, then clicking on `Manage Backups`. You should now see a list of your backups like the image below:
|
||||
|
||||

|
||||
_Source: [Apple Support](https://support.apple.com/en-us/HT211229)_
|
||||
|
||||
If your backup has a lock next to it like in the image above, then the backup is encrypted. You should also see the date and time when the encrypted backup was created. The backup files are stored in `~/Library/Application Support/MobileSync/`.
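To locate the backup before copying it for analysis, a sketch follows; on typical installs each backup is a subfolder of `Backup/` named after the device identifier, though this layout is an assumption and may vary:

```bash
ls ~/Library/Application\ Support/MobileSync/Backup/
```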
|
||||
|
||||
## Notes:
|
||||
|
||||
- Remember to keep the backup encryption password that you created safe, since without it you will not be able to access/modify/decrypt the backup file.
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
If you have correctly [installed libimobiledevice](../install.md) you can easily generate an iTunes backup using the `idevicebackup2` tool included in the suite. First, you might want to ensure that backup encryption is enabled (**note: encrypted backups contain more data than unencrypted backups**):
|
||||
|
||||
```bash
|
||||
idevicebackup2 -i backup encryption on
|
||||
idevicebackup2 -i encryption on
|
||||
```
|
||||
|
||||
Note that if a backup password was previously set on this device, you might need to use the same or change it. You can try changing password using `idevicebackup2 -i backup changepw`, or by turning off encryption (`idevicebackup2 -i backup encryption off`) and turning it back on again.
|
||||
Note that if a backup password was previously set on this device, you might need to use the same password or change it. You can try changing the password using `idevicebackup2 -i changepw`, or by turning off encryption (`idevicebackup2 -i encryption off`) and turning it back on again.
|
||||
|
||||
If you are not able to recover or change the password, you should try to disable encryption and obtain an unencrypted backup.
|
||||
|
||||
|
||||
@@ -45,10 +45,10 @@ Once the idevice tools are available you can check if everything works fine by c
|
||||
ideviceinfo
|
||||
```
|
||||
|
||||
This should some many details on the connected iOS device. If you are connecting the device to your laptop for the first time, it will require to unlock and enter the PIN code on the mobile device. If it complains that no device is connected and the mobile device is indeed plugged in through the USB cable, you might need to do this first, although typically the pairing is automatically done when connecting the device:
|
||||
This should show many details about the connected iOS device. If you are connecting the device to your laptop for the first time, it will require you to unlock the device and enter its PIN code. If it complains that no device is connected while the mobile device is indeed plugged in through the USB cable, you might need to do the following first, although typically the pairing is done automatically when connecting the device:
|
||||
|
||||
```bash
|
||||
sudo usbmuxd -f -d
|
||||
sudo usbmuxd -f -v
|
||||
idevicepair pair
|
||||
```
|
||||
|
||||
|
||||
@@ -16,9 +16,21 @@ If indicators are provided through the command-line, processes and domains are c
|
||||
|
||||
---
|
||||
|
||||
### `applications.json`
|
||||
|
||||
!!! info "Availability"
|
||||
Backup: :material-check:
|
||||
Full filesystem dump: :material-check:
|
||||
|
||||
This JSON file is created by mvt-ios' `Applications` module. The module extracts the list of applications installed on the device from the `Info.plist` file in a backup, or from the `iTunesMetadata.plist` files in a file system dump. These records contain detailed information on the source and installation of each app.
|
||||
|
||||
If indicators are provided through the command-line, processes and application IDs are checked against the app name of each application. It also flags any applications not installed from the App Store. Any matches are stored in *applications_detected.json*.
|
||||
|
||||
---
|
||||
|
||||
### `backup_info.json`
|
||||
|
||||
!!! info "Availabiliy"
|
||||
!!! info "Availability"
|
||||
Backup: :material-check:
|
||||
Full filesystem dump: :material-close:
|
||||
|
||||
@@ -38,6 +50,18 @@ If indicators are provided through the command-line, they are checked against th
|
||||
|
||||
---
|
||||
|
||||
### `calendar.json`
|
||||
|
||||
!!! info "Availability"
|
||||
Backup: :material-check:
|
||||
Full filesystem dump: :material-check:
|
||||
|
||||
This JSON file is created by mvt-ios' `Calendar` module. This module extracts all CalendarItems from the `Calendar.sqlitedb` database. This database contains all calendar entries from the different calendars installed on the phone.
|
||||
|
||||
If indicators are provided through the command-line, email addresses are checked against the inviter's email of the different events. Any matches are stored in *calendar_detected.json*.
|
||||
|
||||
---
|
||||
|
||||
### `calls.json`
|
||||
|
||||
!!! info "Availability"
|
||||
@@ -118,6 +142,16 @@ If indicators are provided through the command-line, they are checked against th
|
||||
|
||||
---
|
||||
|
||||
### `global_preferences.json`
|
||||
|
||||
!!! info "Availability"
|
||||
Backup: :material-check:
|
||||
Full filesystem dump: :material-check:
|
||||
|
||||
This JSON file is created by mvt-ios' `GlobalPreferences` module. The module extracts records from a Plist file located at */private/var/mobile/Library/Preferences/.GlobalPreferences.plist*, which contains system preferences, including whether Lockdown Mode is enabled.
|
||||
|
||||
---
|
||||
|
||||
### `id_status_cache.json`
|
||||
|
||||
!!! info "Availability"
|
||||
@@ -272,7 +306,7 @@ If indicators are provided through the command-line, they are checked against th
|
||||
Backup: :material-check:
|
||||
Full filesystem dump: :material-check:
|
||||
|
||||
This JSON file is created by mvt-ios' `SMS` module. The module extracts a list of SMS messages containing HTTP links from the SQLite database located at */private/var/mobile/Library/SMS/sms.db*.
|
||||
This JSON file is created by mvt-ios' `SMS` module. The module extracts a list of SMS messages from the SQLite database located at */private/var/mobile/Library/SMS/sms.db*.
|
||||
|
||||
If indicators are provided through the command-line, they are checked against the extracted HTTP links. Any matches are stored in *sms_detected.json*.
|
||||
|
||||
@@ -374,7 +408,7 @@ If indicators are provided through the command-line, they are checked against th
|
||||
Backup: :material-check:
|
||||
Full filesystem dump: :material-check:
|
||||
|
||||
This JSON file is created by mvt-ios' `WhatsApp` module. The module extracts a list of WhatsApp messages containing HTTP links from the SQLite database located at *private/var/mobile/Containers/Shared/AppGroup/\*/ChatStorage.sqlite*.
|
||||
This JSON file is created by mvt-ios' `WhatsApp` module. The module extracts a list of WhatsApp messages from the SQLite database located at *private/var/mobile/Containers/Shared/AppGroup/\*/ChatStorage.sqlite*.
|
||||
|
||||
If indicators are provided through the command-line, they are checked against the extracted HTTP links. Any matches are stored in *whatsapp_detected.json*.
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
mkdocs==1.2.3
|
||||
mkdocs-autorefs
|
||||
mkdocs-material
|
||||
mkdocs-material-extensions
|
||||
mkdocstrings
|
||||
mkdocs==1.6.1
|
||||
mkdocs-autorefs==1.4.3
|
||||
mkdocs-material==9.6.20
|
||||
mkdocs-material-extensions==1.3.1
|
||||
mkdocstrings==0.30.1
|
||||
@@ -1,14 +1,14 @@
|
||||
site_name: Mobile Verification Toolkit
|
||||
repo_url: https://github.com/mvt-project/mvt
|
||||
edit_uri: edit/main/docs/
|
||||
copyright: Copyright © 2021 MVT Project Developers
|
||||
copyright: Copyright © 2021-2023 MVT Project Developers
|
||||
site_description: Mobile Verification Toolkit Documentation
|
||||
markdown_extensions:
|
||||
- attr_list
|
||||
- admonition
|
||||
- pymdownx.emoji:
|
||||
emoji_index: !!python/name:materialx.emoji.twemoji
|
||||
emoji_generator: !!python/name:materialx.emoji.to_svg
|
||||
emoji_index: !!python/name:material.extensions.emoji.twemoji
|
||||
emoji_generator: !!python/name:material.extensions.emoji.to_svg
|
||||
- pymdownx.superfences
|
||||
- pymdownx.inlinehilite
|
||||
- pymdownx.highlight:
|
||||
@@ -46,4 +46,5 @@ nav:
|
||||
- Check an Android Backup (SMS messages): "android/backup.md"
|
||||
- Download APKs: "android/download_apks.md"
|
||||
- Indicators of Compromise: "iocs.md"
|
||||
- Development: "development.md"
|
||||
- License: "license.md"
|
||||
|
||||
@@ -1,391 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import getpass
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import tarfile
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile
|
||||
|
||||
import click
|
||||
from rich.logging import RichHandler
|
||||
|
||||
from mvt.android.parsers.backup import (AndroidBackupParsingError,
|
||||
InvalidBackupPassword, parse_ab_header,
|
||||
parse_backup_file)
|
||||
from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_IOC,
|
||||
HELP_MSG_LIST_MODULES, HELP_MSG_MODULE,
|
||||
HELP_MSG_OUTPUT, HELP_MSG_SERIAL)
|
||||
from mvt.common.indicators import Indicators, download_indicators_files
|
||||
from mvt.common.logo import logo
|
||||
from mvt.common.module import run_module, save_timeline
|
||||
|
||||
from .download_apks import DownloadAPKs
|
||||
from .lookups.koodous import koodous_lookup
|
||||
from .lookups.virustotal import virustotal_lookup
|
||||
from .modules.adb import ADB_MODULES
|
||||
from .modules.backup import BACKUP_MODULES
|
||||
from .modules.bugreport import BUGREPORT_MODULES
|
||||
|
||||
# Setup logging using Rich.
|
||||
LOG_FORMAT = "[%(name)s] %(message)s"
|
||||
logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
|
||||
RichHandler(show_path=False, log_time_format="%X")])
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Main
|
||||
#==============================================================================
|
||||
@click.group(invoke_without_command=False)
|
||||
def cli():
|
||||
logo()
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: version
|
||||
#==============================================================================
|
||||
@cli.command("version", help="Show the currently installed version of MVT")
|
||||
def version():
|
||||
return
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: download-apks
|
||||
#==============================================================================
|
||||
@cli.command("download-apks", help="Download all or only non-system installed APKs")
|
||||
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
|
||||
@click.option("--all-apks", "-a", is_flag=True,
|
||||
help="Extract all packages installed on the phone, including system packages")
|
||||
@click.option("--virustotal", "-v", is_flag=True, help="Check packages on VirusTotal")
|
||||
@click.option("--koodous", "-k", is_flag=True, help="Check packages on Koodous")
|
||||
@click.option("--all-checks", "-A", is_flag=True, help="Run all available checks")
|
||||
@click.option("--output", "-o", type=click.Path(exists=False),
|
||||
help="Specify a path to a folder where you want to store the APKs")
|
||||
@click.option("--from-file", "-f", type=click.Path(exists=True),
|
||||
help="Instead of acquiring from phone, load an existing packages.json file for lookups (mainly for debug purposes)")
|
||||
@click.pass_context
|
||||
def download_apks(ctx, all_apks, virustotal, koodous, all_checks, output, from_file, serial):
|
||||
try:
|
||||
if from_file:
|
||||
download = DownloadAPKs.from_json(from_file)
|
||||
else:
|
||||
# TODO: Do we actually want to be able to run without storing any file?
|
||||
if not output:
|
||||
log.critical("You need to specify an output folder with --output!")
|
||||
ctx.exit(1)
|
||||
|
||||
if not os.path.exists(output):
|
||||
try:
|
||||
os.makedirs(output)
|
||||
except Exception as e:
|
||||
log.critical("Unable to create output folder %s: %s", output, e)
|
||||
ctx.exit(1)
|
||||
|
||||
download = DownloadAPKs(output_folder=output, all_apks=all_apks,
|
||||
log=logging.getLogger(DownloadAPKs.__module__))
|
||||
if serial:
|
||||
download.serial = serial
|
||||
download.run()
|
||||
|
||||
packages = download.packages
|
||||
|
||||
if len(packages) == 0:
|
||||
return
|
||||
|
||||
if virustotal or all_checks:
|
||||
virustotal_lookup(packages)
|
||||
|
||||
if koodous or all_checks:
|
||||
koodous_lookup(packages)
|
||||
except KeyboardInterrupt:
|
||||
print("")
|
||||
ctx.exit(1)
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: check-adb
|
||||
#==============================================================================
|
||||
@cli.command("check-adb", help="Check an Android device over adb")
|
||||
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
|
||||
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
|
||||
default=[], help=HELP_MSG_IOC)
|
||||
@click.option("--output", "-o", type=click.Path(exists=False),
|
||||
help=HELP_MSG_OUTPUT)
|
||||
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
|
||||
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
|
||||
@click.option("--module", "-m", help=HELP_MSG_MODULE)
|
||||
@click.pass_context
|
||||
def check_adb(ctx, iocs, output, fast, list_modules, module, serial):
|
||||
if list_modules:
|
||||
log.info("Following is the list of available check-adb modules:")
|
||||
for adb_module in ADB_MODULES:
|
||||
log.info(" - %s", adb_module.__name__)
|
||||
|
||||
return
|
||||
|
||||
log.info("Checking Android through adb bridge")
|
||||
|
||||
if output and not os.path.exists(output):
|
||||
try:
|
||||
os.makedirs(output)
|
||||
except Exception as e:
|
||||
log.critical("Unable to create output folder %s: %s", output, e)
|
||||
ctx.exit(1)
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
indicators.load_indicators_files(iocs)
|
||||
|
||||
timeline = []
|
||||
timeline_detected = []
|
||||
for adb_module in ADB_MODULES:
|
||||
if module and adb_module.__name__ != module:
|
||||
continue
|
||||
|
||||
m = adb_module(output_folder=output, fast_mode=fast,
|
||||
log=logging.getLogger(adb_module.__module__))
|
||||
if indicators.total_ioc_count:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
if serial:
|
||||
m.serial = serial
|
||||
|
||||
run_module(m)
|
||||
timeline.extend(m.timeline)
|
||||
timeline_detected.extend(m.timeline_detected)
|
||||
|
||||
if output:
|
||||
if len(timeline) > 0:
|
||||
save_timeline(timeline, os.path.join(output, "timeline.csv"))
|
||||
if len(timeline_detected) > 0:
|
||||
save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: check-bugreport
|
||||
#==============================================================================
|
||||
@cli.command("check-bugreport", help="Check an Android Bug Report")
|
||||
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
|
||||
default=[], help=HELP_MSG_IOC)
|
||||
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
|
||||
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
|
||||
@click.option("--module", "-m", help=HELP_MSG_MODULE)
|
||||
@click.argument("BUGREPORT_PATH", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def check_bugreport(ctx, iocs, output, list_modules, module, bugreport_path):
|
||||
if list_modules:
|
||||
log.info("Following is the list of available check-bugreport modules:")
|
||||
for adb_module in BUGREPORT_MODULES:
|
||||
log.info(" - %s", adb_module.__name__)
|
||||
|
||||
return
|
||||
|
||||
log.info("Checking an Android Bug Report located at: %s", bugreport_path)
|
||||
|
||||
if output and not os.path.exists(output):
|
||||
try:
|
||||
os.makedirs(output)
|
||||
except Exception as e:
|
||||
log.critical("Unable to create output folder %s: %s", output, e)
|
||||
ctx.exit(1)
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
indicators.load_indicators_files(iocs)
|
||||
|
||||
if os.path.isfile(bugreport_path):
|
||||
bugreport_format = "zip"
|
||||
zip_archive = ZipFile(bugreport_path)
|
||||
zip_files = []
|
||||
for file_name in zip_archive.namelist():
|
||||
zip_files.append(file_name)
|
||||
elif os.path.isdir(bugreport_path):
|
||||
bugreport_format = "dir"
|
||||
folder_files = []
|
||||
parent_path = Path(bugreport_path).absolute().as_posix()
|
||||
for root, subdirs, subfiles in os.walk(os.path.abspath(bugreport_path)):
|
||||
for file_name in subfiles:
|
||||
folder_files.append(os.path.relpath(os.path.join(root, file_name), parent_path))
|
||||
|
||||
timeline = []
|
||||
timeline_detected = []
|
||||
for bugreport_module in BUGREPORT_MODULES:
|
||||
if module and bugreport_module.__name__ != module:
|
||||
continue
|
||||
|
||||
m = bugreport_module(base_folder=bugreport_path, output_folder=output,
|
||||
log=logging.getLogger(bugreport_module.__module__))
|
||||
|
||||
if bugreport_format == "zip":
|
||||
m.from_zip(zip_archive, zip_files)
|
||||
else:
|
||||
m.from_folder(bugreport_path, folder_files)
|
||||
|
||||
if indicators.total_ioc_count:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
|
||||
run_module(m)
|
||||
timeline.extend(m.timeline)
|
||||
timeline_detected.extend(m.timeline_detected)
|
||||
|
||||
if output:
|
||||
if len(timeline) > 0:
|
||||
save_timeline(timeline, os.path.join(output, "timeline.csv"))
|
||||
if len(timeline_detected) > 0:
|
||||
save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: check-backup
|
||||
#==============================================================================
|
||||
@cli.command("check-backup", help="Check an Android Backup")
|
||||
@click.option("--serial", "-s", type=str, help=HELP_MSG_SERIAL)
|
||||
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
|
||||
default=[], help=HELP_MSG_IOC)
|
||||
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
|
||||
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def check_backup(ctx, iocs, output, backup_path, serial):
|
||||
log.info("Checking ADB backup located at: %s", backup_path)
|
||||
|
||||
if os.path.isfile(backup_path):
|
||||
# AB File
|
||||
backup_type = "ab"
|
||||
with open(backup_path, "rb") as handle:
|
||||
data = handle.read()
|
||||
header = parse_ab_header(data)
|
||||
if not header["backup"]:
|
||||
log.critical("Invalid backup format, file should be in .ab format")
|
||||
ctx.exit(1)
|
||||
password = None
|
||||
if header["encryption"] != "none":
|
||||
password = getpass.getpass(prompt="Backup Password: ", stream=None)
|
||||
try:
|
||||
tardata = parse_backup_file(data, password=password)
|
||||
except InvalidBackupPassword:
|
||||
log.critical("Invalid backup password")
|
||||
ctx.exit(1)
|
||||
except AndroidBackupParsingError:
|
||||
log.critical("Impossible to parse this backup file, please use Android Backup Extractor instead")
|
||||
ctx.exit(1)
|
||||
|
||||
dbytes = io.BytesIO(tardata)
|
||||
tar = tarfile.open(fileobj=dbytes)
|
||||
files = []
|
||||
for member in tar:
|
||||
files.append(member.name)
|
||||
|
||||
elif os.path.isdir(backup_path):
|
||||
backup_type = "folder"
|
||||
backup_path = Path(backup_path).absolute().as_posix()
|
||||
files = []
|
||||
for root, subdirs, subfiles in os.walk(os.path.abspath(backup_path)):
|
||||
for fname in subfiles:
|
||||
files.append(os.path.relpath(os.path.join(root, fname), backup_path))
|
||||
else:
|
||||
log.critical("Invalid backup path, path should be a folder or an Android Backup (.ab) file")
|
||||
ctx.exit(1)
|
||||
|
||||
if output and not os.path.exists(output):
|
||||
try:
|
||||
os.makedirs(output)
|
||||
except Exception as e:
|
||||
log.critical("Unable to create output folder %s: %s", output, e)
|
||||
ctx.exit(1)
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
indicators.load_indicators_files(iocs)
|
||||
|
||||
for module in BACKUP_MODULES:
|
||||
m = module(base_folder=backup_path, output_folder=output,
|
||||
log=logging.getLogger(module.__module__))
|
||||
if indicators.total_ioc_count:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
if serial:
|
||||
m.serial = serial
|
||||
|
||||
if backup_type == "folder":
|
||||
m.from_folder(backup_path, files)
|
||||
else:
|
||||
m.from_ab(backup_path, tar, files)
|
||||
|
||||
run_module(m)
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: check-iocs
|
||||
#==============================================================================
|
||||
@cli.command("check-iocs", help="Compare stored JSON results to provided indicators")
|
||||
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
|
||||
default=[], help=HELP_MSG_IOC)
|
||||
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
|
||||
@click.option("--module", "-m", help=HELP_MSG_MODULE)
|
||||
@click.argument("FOLDER", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def check_iocs(ctx, iocs, list_modules, module, folder):
|
||||
all_modules = []
|
||||
for entry in BACKUP_MODULES + ADB_MODULES:
|
||||
if entry not in all_modules:
|
||||
all_modules.append(entry)
|
||||
|
||||
if list_modules:
|
||||
log.info("Following is the list of available check-iocs modules:")
|
||||
for iocs_module in all_modules:
|
||||
log.info(" - %s", iocs_module.__name__)
|
||||
|
||||
return
|
||||
|
||||
log.info("Checking stored results against provided indicators...")
|
||||
|
||||
indicators = Indicators(log=log)
|
||||
indicators.load_indicators_files(iocs)
|
||||
|
||||
total_detections = 0
|
||||
for file_name in os.listdir(folder):
|
||||
name_only, ext = os.path.splitext(file_name)
|
||||
file_path = os.path.join(folder, file_name)
|
||||
|
||||
# TODO: Skipping processing of result files that are not json.
|
||||
# We might want to revisit this eventually.
|
||||
if ext != ".json":
|
||||
continue
|
||||
|
||||
for iocs_module in all_modules:
|
||||
if module and iocs_module.__name__ != module:
|
||||
continue
|
||||
|
||||
if iocs_module().get_slug() != name_only:
|
||||
continue
|
||||
|
||||
log.info("Loading results from \"%s\" with module %s", file_name,
|
||||
iocs_module.__name__)
|
||||
|
||||
m = iocs_module.from_json(file_path,
|
||||
log=logging.getLogger(iocs_module.__module__))
|
||||
if indicators.total_ioc_count > 0:
|
||||
m.indicators = indicators
|
||||
m.indicators.log = m.log
|
||||
|
||||
try:
|
||||
m.check_indicators()
|
||||
except NotImplementedError:
|
||||
continue
|
||||
else:
|
||||
total_detections += len(m.detected)
|
||||
|
||||
if total_detections > 0:
|
||||
log.warning("The check of the results produced %d detections!",
|
||||
total_detections)
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: download-iocs
|
||||
#==============================================================================
|
||||
@cli.command("download-iocs", help="Download public STIX2 indicators")
|
||||
def download_indicators():
|
||||
download_indicators_files(log)
|
||||
@@ -1,10 +0,0 @@
|
||||
su
|
||||
busybox
|
||||
supersu
|
||||
Superuser.apk
|
||||
KingoUser.apk
|
||||
SuperSu.apk
|
||||
magisk
|
||||
magiskhide
|
||||
magiskinit
|
||||
magiskpolicy
|
||||
@@ -1,25 +0,0 @@
|
||||
com.noshufou.android.su
|
||||
com.noshufou.android.su.elite
|
||||
eu.chainfire.supersu
|
||||
com.koushikdutta.superuser
|
||||
com.thirdparty.superuser
|
||||
com.yellowes.su
|
||||
com.koushikdutta.rommanager
|
||||
com.koushikdutta.rommanager.license
|
||||
com.dimonvideo.luckypatcher
|
||||
com.chelpus.lackypatch
|
||||
com.ramdroid.appquarantine
|
||||
com.ramdroid.appquarantinepro
|
||||
com.devadvance.rootcloak
|
||||
com.devadvance.rootcloakplus
|
||||
de.robv.android.xposed.installer
|
||||
com.saurik.substrate
|
||||
com.zachspong.temprootremovejb
|
||||
com.amphoras.hidemyroot
|
||||
com.amphoras.hidemyrootadfree
|
||||
com.formyhm.hiderootPremium
|
||||
com.formyhm.hideroot
|
||||
me.phh.superuser
|
||||
eu.chainfire.supersu.pro
|
||||
com.kingouser.com
|
||||
com.topjohnwu.magisk
|
||||
@@ -1,58 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

import requests
from rich.console import Console
from rich.progress import track
from rich.table import Table
from rich.text import Text

log = logging.getLogger(__name__)


def koodous_lookup(packages):
    log.info("Looking up all extracted files on Koodous (www.koodous.com)")
    log.info("This might take a while...")

    table = Table(title="Koodous Packages Detections")
    table.add_column("Package name")
    table.add_column("File name")
    table.add_column("Trusted")
    table.add_column("Detected")
    table.add_column("Rating")

    total_packages = len(packages)
    for i in track(range(total_packages), description=f"Looking up {total_packages} packages..."):
        package = packages[i]
        for file in package.get("files", []):
            url = f"https://api.koodous.com/apks/{file['sha256']}"
            res = requests.get(url)
            report = res.json()

            row = [package["package_name"], file["path"]]

            if "package_name" in report:
                trusted = "no"
                if report["trusted"]:
                    trusted = Text("yes", "green bold")

                detected = "no"
                if report["detected"]:
                    detected = Text("yes", "red bold")

                rating = "0"
                if int(report["rating"]) < 0:
                    rating = Text(str(report["rating"]), "red bold")

                row.extend([trusted, detected, rating])
            else:
                row.extend(["n/a", "n/a", "n/a"])

            table.add_row(*row)

    console = Console()
    console.print(table)
@@ -1,100 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

import requests
from rich.console import Console
from rich.progress import track
from rich.table import Table
from rich.text import Text

log = logging.getLogger(__name__)


def get_virustotal_report(hashes):
    apikey = "233f22e200ca5822bd91103043ccac138b910db79f29af5616a9afe8b6f215ad"
    url = f"https://www.virustotal.com/partners/sysinternals/file-reports?apikey={apikey}"

    items = []
    for sha256 in hashes:
        items.append({
            "autostart_location": "",
            "autostart_entry": "",
            "hash": sha256,
            "local_name": "",
            "creation_datetime": "",
        })
    headers = {"User-Agent": "VirusTotal", "Content-Type": "application/json"}
    res = requests.post(url, headers=headers, json=items)

    if res.status_code == 200:
        report = res.json()
        return report["data"]
    else:
        log.error("Unexpected response from VirusTotal: %s", res.status_code)
        return None


def virustotal_lookup(packages):
    # NOTE: This is temporary, until we resolve the issue.
    log.error("Unfortunately VirusTotal lookup is disabled until further notice, due to unresolved issues with the API service.")
    return

    log.info("Looking up all extracted files on VirusTotal (www.virustotal.com)")

    unique_hashes = []
    for package in packages:
        for file in package.get("files", []):
            if file["sha256"] not in unique_hashes:
                unique_hashes.append(file["sha256"])

    total_unique_hashes = len(unique_hashes)

    detections = {}

    def virustotal_query(batch):
        report = get_virustotal_report(batch)
        if not report:
            return

        for entry in report:
            if entry["hash"] not in detections and entry["found"] is True:
                detections[entry["hash"]] = entry["detection_ratio"]

    batch = []
    for i in track(range(total_unique_hashes), description=f"Looking up {total_unique_hashes} files..."):
        file_hash = unique_hashes[i]
        batch.append(file_hash)
        if len(batch) == 25:
            virustotal_query(batch)
            batch = []

    if batch:
        virustotal_query(batch)

    table = Table(title="VirusTotal Packages Detections")
    table.add_column("Package name")
    table.add_column("File path")
    table.add_column("Detections")

    for package in packages:
        for file in package.get("files", []):
            row = [package["package_name"], file["path"]]

            if file["sha256"] in detections:
                detection = detections[file["sha256"]]
                positives = detection.split("/")[0]
                if int(positives) > 0:
                    row.append(Text(detection, "red bold"))
                else:
                    row.append(detection)
            else:
                row.append("not found")

            table.add_row(*row)

    console = Console()
    console.print(table)
@@ -1,29 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from .chrome_history import ChromeHistory
from .dumpsys_accessibility import DumpsysAccessibility
from .dumpsys_activities import DumpsysActivities
from .dumpsys_battery_daily import DumpsysBatteryDaily
from .dumpsys_battery_history import DumpsysBatteryHistory
from .dumpsys_dbinfo import DumpsysDBInfo
from .dumpsys_full import DumpsysFull
from .dumpsys_receivers import DumpsysReceivers
from .files import Files
from .getprop import Getprop
from .logcat import Logcat
from .packages import Packages
from .processes import Processes
from .root_binaries import RootBinaries
from .selinux_status import SELinuxStatus
from .settings import Settings
from .sms import SMS
from .whatsapp import Whatsapp

ADB_MODULES = [ChromeHistory, SMS, Whatsapp, Processes, Getprop, Settings,
               SELinuxStatus, DumpsysBatteryHistory, DumpsysBatteryDaily,
               DumpsysReceivers, DumpsysActivities, DumpsysAccessibility,
               DumpsysDBInfo, DumpsysFull, Packages, Logcat, RootBinaries,
               Files]
@@ -1,85 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging
import os
import sqlite3

from mvt.common.utils import (convert_chrometime_to_unix,
                              convert_timestamp_to_iso)

from .base import AndroidExtraction

log = logging.getLogger(__name__)

CHROME_HISTORY_PATH = "data/data/com.android.chrome/app_chrome/Default/History"


class ChromeHistory(AndroidExtraction):
    """This module extracts records from Android's Chrome browsing history."""

    def __init__(self, file_path=None, base_folder=None, output_folder=None,
                 serial=None, fast_mode=False, log=None, results=[]):
        super().__init__(file_path=file_path, base_folder=base_folder,
                         output_folder=output_folder, fast_mode=fast_mode,
                         log=log, results=results)

    def serialize(self, record):
        return {
            "timestamp": record["isodate"],
            "module": self.__class__.__name__,
            "event": "visit",
            "data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, redirect source: {record['redirect_source']})"
        }

    def check_indicators(self):
        if not self.indicators:
            return

        for result in self.results:
            if self.indicators.check_domain(result["url"]):
                self.detected.append(result)

    def _parse_db(self, db_path):
        """Parse a Chrome History database file.

        :param db_path: Path to the History database to process.

        """
        conn = sqlite3.connect(db_path)
        cur = conn.cursor()
        cur.execute("""
            SELECT
                urls.id,
                urls.url,
                visits.id,
                visits.visit_time,
                visits.from_visit
            FROM urls
            JOIN visits ON visits.url = urls.id
            ORDER BY visits.visit_time;
        """)

        for item in cur:
            self.results.append({
                "id": item[0],
                "url": item[1],
                "visit_id": item[2],
                "timestamp": item[3],
                "isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(item[3])),
                "redirect_source": item[4],
            })

        cur.close()
        conn.close()

        log.info("Extracted a total of %d history items", len(self.results))

    def run(self):
        try:
            self._adb_process_file(os.path.join("/", CHROME_HISTORY_PATH),
                                   self._parse_db)
        except Exception as e:
            self.log.error(e)
@@ -1,45 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import logging

from mvt.android.parsers import parse_dumpsys_accessibility

from .base import AndroidExtraction

log = logging.getLogger(__name__)


class DumpsysAccessibility(AndroidExtraction):
    """This module extracts stats on accessibility."""

    def __init__(self, file_path=None, base_folder=None, output_folder=None,
                 serial=None, fast_mode=False, log=None, results=[]):
        super().__init__(file_path=file_path, base_folder=base_folder,
                         output_folder=output_folder, fast_mode=fast_mode,
                         log=log, results=results)

    def check_indicators(self):
        if not self.indicators:
            return

        for result in self.results:
            ioc = self.indicators.check_app_id(result["package_name"])
            if ioc:
                result["matched_indicator"] = ioc
                self.detected.append(result)
                continue

    def run(self):
        self._adb_connect()
        output = self._adb_command("dumpsys accessibility")
        self._adb_disconnect()

        self.results = parse_dumpsys_accessibility(output)

        for result in self.results:
            log.info("Found installed accessibility service \"%s\"", result.get("service"))

        self.log.info("Identified a total of %d accessibility services", len(self.results))
@@ -1,45 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_activity_resolver_table
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysActivities(AndroidExtraction):
"""This module extracts details on activities registered to handle intents."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, activities in self.results.items():
|
||||
for activity in activities:
|
||||
ioc = self.indicators.check_app_id(activity["package_name"])
|
||||
if ioc:
|
||||
activity["matched_indicator"] = ioc
|
||||
self.detected.append({intent: activity})
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys package")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_activity_resolver_table(output)
|
||||
|
||||
self.log.info("Extracted activities for %d intents", len(self.results))
|
||||
@@ -1,50 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_daily
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysBatteryDaily(AndroidExtraction):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["from"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "battery_daily",
|
||||
"data": f"Recorded update of package {record['package_name']} with vers {record['vers']}"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys batterystats --daily")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_battery_daily(output)
|
||||
|
||||
self.log.info("Extracted %d records from battery daily stats", len(self.results))
|
||||
@@ -1,42 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_history
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysBatteryHistory(AndroidExtraction):
|
||||
"""This module extracts records from battery history events."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys batterystats --history")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_battery_history(output)
|
||||
|
||||
self.log.info("Extracted %d records from battery history", len(self.results))
|
||||
@@ -1,48 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_dbinfo
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysDBInfo(AndroidExtraction):
"""This module extracts records from the dumpsys dbinfo service."""
|
||||
|
||||
slug = "dumpsys_dbinfo"
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
path = result.get("path", "")
|
||||
for part in path.split("/"):
|
||||
ioc = self.indicators.check_app_id(part)
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
output = self._adb_command("dumpsys dbinfo")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_dumpsys_dbinfo(output)
|
||||
|
||||
self.log.info("Extracted a total of %d records from database information",
|
||||
len(self.results))
|
||||
@@ -1,34 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DumpsysFull(AndroidExtraction):
"""This module extracts the full dumpsys output and stores it to disk."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("dumpsys")
|
||||
if self.output_folder:
|
||||
output_path = os.path.join(self.output_folder, "dumpsys.txt")
|
||||
with open(output_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Full dumpsys output stored at %s", output_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,66 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_receiver_resolver_table
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
INTENT_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
|
||||
INTENT_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
|
||||
INTENT_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
|
||||
INTENT_PHONE_STATE = "android.intent.action.PHONE_STATE"
|
||||
INTENT_NEW_OUTGOING_CALL = "android.intent.action.NEW_OUTGOING_CALL"
|
||||
|
||||
|
||||
class DumpsysReceivers(AndroidExtraction):
|
||||
"""This module extracts details on receivers for risky activities."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, receivers in self.results.items():
|
||||
for receiver in receivers:
|
||||
if intent == INTENT_NEW_OUTGOING_SMS:
|
||||
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_DATA_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_PHONE_STATE:
|
||||
self.log.info("Found a receiver monitoring telephony state/incoming calls: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_NEW_OUTGOING_CALL:
|
||||
self.log.info("Found a receiver monitoring outgoing calls: \"%s\"",
|
||||
receiver["receiver"])
|
||||
|
||||
ioc = self.indicators.check_app_id(receiver["package_name"])
|
||||
if ioc:
|
||||
receiver["matched_indicator"] = ioc
|
||||
self.detected.append({intent: receiver})
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("dumpsys package")
|
||||
self.results = parse_dumpsys_receiver_resolver_table(output)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,97 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import datetime
|
||||
import logging
|
||||
import stat
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Files(AndroidExtraction):
|
||||
"""This module extracts the list of files on the device."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
self.full_find = False
|
||||
|
||||
def find_files(self, folder):
|
||||
if self.full_find:
|
||||
output = self._adb_command(f"find '{folder}' -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
|
||||
|
||||
for file_line in output.splitlines():
|
||||
[unix_timestamp, mode, size, owner, group, full_path] = file_line.rstrip().split(" ", 5)
|
||||
mod_time = convert_timestamp_to_iso(datetime.datetime.utcfromtimestamp(int(float(unix_timestamp))))
|
||||
self.results.append({
|
||||
"path": full_path,
|
||||
"modified_time": mod_time,
|
||||
"mode": mode,
|
||||
"is_suid": (int(mode, 8) & stat.S_ISUID) == 2048,
|
||||
"is_sgid": (int(mode, 8) & stat.S_ISGID) == 1024,
|
||||
"size": size,
|
||||
"owner": owner,
|
||||
"group": group,
|
||||
})
|
||||
else:
|
||||
output = self._adb_command(f"find '{folder}' 2> /dev/null")
|
||||
for file_line in output.splitlines():
|
||||
self.results.append({"path": file_line.rstrip()})
|
||||
|
||||
def serialize(self, record):
|
||||
if "modified_time" in record:
|
||||
return {
|
||||
"timestamp": record["modified_time"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "file_modified",
|
||||
"data": record["path"],
|
||||
}
|
||||
|
||||
def check_suspicious(self):
|
||||
"""Check for files with suspicious permissions"""
|
||||
for result in sorted(self.results, key=lambda item: item["path"]):
|
||||
if result.get("is_suid"):
|
||||
self.log.warning("Found an SUID file in a non-standard directory \"%s\".",
|
||||
result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
def check_indicators(self):
|
||||
"""Check file list for known suspicious files or suspicious properties"""
|
||||
self.check_suspicious()
|
||||
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if self.indicators.check_file_path(result["path"]):
|
||||
self.log.warning("Found a known suspicious file at path: \"%s\"", result["path"])
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("find '/' -maxdepth 1 -printf '%T@ %m %s %u %g %p\n' 2> /dev/null")
|
||||
if output or output.strip().splitlines():
|
||||
self.full_find = True
|
||||
|
||||
for data_path in ["/data/local/tmp/", "/sdcard/", "/tmp/"]:
|
||||
self.find_files(data_path)
|
||||
|
||||
self.log.info("Found %s files in primary Android data directories", len(self.results))
|
||||
|
||||
if self.fast_mode:
|
||||
self.log.info("Flag --fast was enabled: skipping full file listing")
|
||||
else:
|
||||
self.log.info("Processing full file listing. This may take a while...")
|
||||
self.find_files("/")
|
||||
self.log.info("Found %s total files", len(self.results))
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,43 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from mvt.android.parsers import parse_getprop
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Getprop(AndroidExtraction):
|
||||
"""This module extracts device properties from getprop command."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = {} if not results else results
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
output = self._adb_command("getprop")
|
||||
self._adb_disconnect()
|
||||
|
||||
self.results = parse_getprop(output)
|
||||
|
||||
# Alert if phone is outdated.
|
||||
security_patch = self.results.get("ro.build.version.security_patch", "")
|
||||
if security_patch:
|
||||
patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
|
||||
if (datetime.now() - patch_date) > timedelta(days=6*30):
|
||||
self.log.warning("This phone has not received security updates for more than "
|
||||
"six months (last update: %s)", security_patch)
|
||||
|
||||
self.log.info("Extracted %d Android system properties", len(self.results))
|
||||
@@ -1,48 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Logcat(AndroidExtraction):
"""This module extracts the device logcat logs."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
# Get the current logcat.
|
||||
output = self._adb_command("logcat -d")
|
||||
# Get the logcat prior to the last reboot.
|
||||
last_output = self._adb_command("logcat -L")
|
||||
|
||||
if self.output_folder:
|
||||
logcat_path = os.path.join(self.output_folder,
|
||||
"logcat.txt")
|
||||
with open(logcat_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(output)
|
||||
|
||||
log.info("Current logcat logs stored at %s",
|
||||
logcat_path)
|
||||
|
||||
logcat_last_path = os.path.join(self.output_folder,
|
||||
"logcat_last.txt")
|
||||
with open(logcat_last_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(last_output)
|
||||
|
||||
log.info("Logcat logs prior to last reboot stored at %s",
|
||||
logcat_last_path)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,248 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from mvt.android.lookups.koodous import koodous_lookup
|
||||
from mvt.android.lookups.virustotal import virustotal_lookup
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
DANGEROUS_PERMISSIONS_THRESHOLD = 10
|
||||
DANGEROUS_PERMISSIONS = [
|
||||
"android.permission.ACCESS_COARSE_LOCATION",
|
||||
"android.permission.ACCESS_FINE_LOCATION",
|
||||
"android.permission.AUTHENTICATE_ACCOUNTS",
|
||||
"android.permission.CAMERA",
|
||||
"android.permission.DISABLE_KEYGUARD",
|
||||
"android.permission.PROCESS_OUTGOING_CALLS",
|
||||
"android.permission.READ_CALENDAR",
|
||||
"android.permission.READ_CALL_LOG",
|
||||
"android.permission.READ_CONTACTS",
|
||||
"android.permission.READ_PHONE_STATE",
|
||||
"android.permission.READ_SMS",
|
||||
"android.permission.RECEIVE_MMS",
|
||||
"android.permission.RECEIVE_SMS",
|
||||
"android.permission.RECEIVE_WAP_PUSH",
|
||||
"android.permission.RECORD_AUDIO",
|
||||
"android.permission.SEND_SMS",
|
||||
"android.permission.SYSTEM_ALERT_WINDOW",
|
||||
"android.permission.USE_CREDENTIALS",
|
||||
"android.permission.USE_SIP",
|
||||
"com.android.browser.permission.READ_HISTORY_BOOKMARKS",
|
||||
]
|
||||
|
||||
|
||||
class Packages(AndroidExtraction):
|
||||
"""This module extracts the list of installed packages."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
records = []
|
||||
|
||||
timestamps = [
|
||||
{"event": "package_install", "timestamp": record["timestamp"]},
|
||||
{"event": "package_first_install", "timestamp": record["first_install_time"]},
|
||||
{"event": "package_last_update", "timestamp": record["last_update_time"]},
|
||||
]
|
||||
|
||||
for ts in timestamps:
|
||||
records.append({
|
||||
"timestamp": ts["timestamp"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": ts["event"],
|
||||
"data": f"{record['package_name']} (system: {record['system']}, third party: {record['third_party']})",
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
root_packages_path = os.path.join("..", "..", "data", "root_packages.txt")
|
||||
root_packages_string = pkg_resources.resource_string(__name__, root_packages_path)
|
||||
root_packages = root_packages_string.decode("utf-8").splitlines()
|
||||
root_packages = [rp.strip() for rp in root_packages]
|
||||
|
||||
for result in self.results:
|
||||
if result["package_name"] in root_packages:
|
||||
self.log.warning("Found an installed package related to rooting/jailbreaking: \"%s\"",
|
||||
result["package_name"])
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if not self.indicators:
|
||||
continue
|
||||
|
||||
ioc = self.indicators.check_app_id(result.get("package_name"))
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
for package_file in result["files"]:
|
||||
ioc = self.indicators.check_file_hash(package_file["sha256"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
|
||||
@staticmethod
|
||||
def parse_package_for_details(output):
|
||||
details = {
|
||||
"uid": "",
|
||||
"version_name": "",
|
||||
"version_code": "",
|
||||
"timestamp": "",
|
||||
"first_install_time": "",
|
||||
"last_update_time": "",
|
||||
"requested_permissions": [],
|
||||
}
|
||||
|
||||
in_permissions = False
|
||||
for line in output.splitlines():
|
||||
if in_permissions:
|
||||
if line.startswith(" " * 4) and not line.startswith(" " * 6):
|
||||
in_permissions = False
|
||||
continue
|
||||
|
||||
permission = line.strip().split(":")[0]
|
||||
details["requested_permissions"].append(permission)
|
||||
|
||||
if line.strip().startswith("userId="):
|
||||
details["uid"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("versionName="):
|
||||
details["version_name"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("versionCode="):
|
||||
details["version_code"] = line.split("=", 1)[1].strip()
|
||||
elif line.strip().startswith("timeStamp="):
|
||||
details["timestamp"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("firstInstallTime="):
|
||||
details["first_install_time"] = line.split("=")[1].strip()
|
||||
elif line.strip().startswith("lastUpdateTime="):
|
||||
details["last_update_time"] = line.split("=")[1].strip()
|
||||
elif line.strip() == "requested permissions:":
|
||||
in_permissions = True
|
||||
continue
|
||||
|
||||
return details
|
||||
|
||||
def _get_files_for_package(self, package_name):
|
||||
output = self._adb_command(f"pm path {package_name}")
|
||||
output = output.strip().replace("package:", "")
|
||||
if not output:
|
||||
return []
|
||||
|
||||
package_files = []
|
||||
for file_path in output.splitlines():
|
||||
file_path = file_path.strip()
|
||||
|
||||
md5 = self._adb_command(f"md5sum {file_path}").split(" ")[0]
|
||||
sha1 = self._adb_command(f"sha1sum {file_path}").split(" ")[0]
|
||||
sha256 = self._adb_command(f"sha256sum {file_path}").split(" ")[0]
|
||||
sha512 = self._adb_command(f"sha512sum {file_path}").split(" ")[0]
|
||||
|
||||
package_files.append({
|
||||
"path": file_path,
|
||||
"md5": md5,
|
||||
"sha1": sha1,
|
||||
"sha256": sha256,
|
||||
"sha512": sha512,
|
||||
})
|
||||
|
||||
return package_files
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
packages = self._adb_command("pm list packages -u -i -f")
|
||||
|
||||
for line in packages.splitlines():
|
||||
line = line.strip()
|
||||
if not line.startswith("package:"):
|
||||
continue
|
||||
|
||||
fields = line.split()
|
||||
file_name, package_name = fields[0].split(":")[1].rsplit("=", 1)
|
||||
|
||||
try:
|
||||
installer = fields[1].split("=")[1].strip()
|
||||
except IndexError:
|
||||
installer = None
|
||||
else:
|
||||
if installer == "null":
|
||||
installer = None
|
||||
|
||||
package_files = self._get_files_for_package(package_name)
|
||||
new_package = {
|
||||
"package_name": package_name,
|
||||
"file_name": file_name,
|
||||
"installer": installer,
|
||||
"disabled": False,
|
||||
"system": False,
|
||||
"third_party": False,
|
||||
"files": package_files,
|
||||
}
|
||||
|
||||
dumpsys_package = self._adb_command(f"dumpsys package {package_name}")
|
||||
package_details = self.parse_package_for_details(dumpsys_package)
|
||||
new_package.update(package_details)
|
||||
|
||||
self.results.append(new_package)
|
||||
|
||||
cmds = [
|
||||
{"field": "disabled", "arg": "-d"},
|
||||
{"field": "system", "arg": "-s"},
|
||||
{"field": "third_party", "arg": "-3"},
|
||||
]
|
||||
for cmd in cmds:
|
||||
output = self._adb_command(f"pm list packages {cmd['arg']}")
|
||||
for line in output.splitlines():
|
||||
line = line.strip()
|
||||
if not line.startswith("package:"):
|
||||
continue
|
||||
|
||||
package_name = line.split(":", 1)[1]
|
||||
for i, result in enumerate(self.results):
|
||||
if result["package_name"] == package_name:
|
||||
self.results[i][cmd["field"]] = True
|
||||
|
||||
for result in self.results:
|
||||
if not result["third_party"]:
|
||||
continue
|
||||
|
||||
dangerous_permissions_count = 0
|
||||
for perm in result["requested_permissions"]:
|
||||
if perm in DANGEROUS_PERMISSIONS:
|
||||
dangerous_permissions_count += 1
|
||||
|
||||
if dangerous_permissions_count >= DANGEROUS_PERMISSIONS_THRESHOLD:
|
||||
self.log.info("Third-party package \"%s\" requested %d potentially dangerous permissions",
|
||||
result["package_name"], dangerous_permissions_count)
|
||||
|
||||
packages_to_lookup = []
|
||||
for result in self.results:
|
||||
if result["system"]:
|
||||
continue
|
||||
|
||||
packages_to_lookup.append(result)
|
||||
self.log.info("Found non-system package with name \"%s\" installed by \"%s\" on %s",
|
||||
result["package_name"], result["installer"], result["timestamp"])
|
||||
|
||||
if not self.fast_mode:
|
||||
virustotal_lookup(packages_to_lookup)
|
||||
koodous_lookup(packages_to_lookup)
|
||||
|
||||
self.log.info("Extracted a total of %d installed package names",
|
||||
len(self.results))
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,65 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Processes(AndroidExtraction):
|
||||
"""This module extracts details on running processes."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result.get("name", ""))
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._adb_connect()
|
||||
|
||||
output = self._adb_command("ps -e")
|
||||
|
||||
for line in output.splitlines()[1:]:
|
||||
line = line.strip()
|
||||
if line == "":
|
||||
continue
|
||||
|
||||
fields = line.split()
|
||||
proc = {
|
||||
"user": fields[0],
|
||||
"pid": fields[1],
|
||||
"parent_pid": fields[2],
|
||||
"vsize": fields[3],
|
||||
"rss": fields[4],
|
||||
}
|
||||
|
||||
# Sometimes WCHAN is empty, so we need to re-align output fields.
|
||||
if len(fields) == 8:
|
||||
proc["wchan"] = ""
|
||||
proc["pc"] = fields[5]
|
||||
proc["name"] = fields[7]
|
||||
elif len(fields) == 9:
|
||||
proc["wchan"] = fields[5]
|
||||
proc["pc"] = fields[6]
|
||||
proc["name"] = fields[8]
|
||||
|
||||
self.results.append(proc)
|
||||
|
||||
self._adb_disconnect()
|
||||
|
||||
log.info("Extracted records on a total of %d processes", len(self.results))
|
||||
@@ -1,49 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RootBinaries(AndroidExtraction):
"""This module searches for known root binaries on the device."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
root_binaries_path = os.path.join("..", "..", "data", "root_binaries.txt")
|
||||
root_binaries_string = pkg_resources.resource_string(__name__, root_binaries_path)
|
||||
root_binaries = root_binaries_string.decode("utf-8").splitlines()
|
||||
|
||||
self._adb_connect()
|
||||
|
||||
for root_binary in root_binaries:
|
||||
root_binary = root_binary.strip()
|
||||
if not root_binary:
|
||||
continue
|
||||
|
||||
output = self._adb_command(f"which -a {root_binary}")
|
||||
output = output.strip()
|
||||
|
||||
if not output:
|
||||
continue
|
||||
|
||||
if "which: not found" in output:
|
||||
continue
|
||||
|
||||
self.detected.append(root_binary)
|
||||
self.log.warning("Found root binary \"%s\"", root_binary)
|
||||
|
||||
self._adb_disconnect()
|
||||
@@ -1,144 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import base64
|
||||
import getpass
|
||||
import logging
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
from mvt.android.parsers.backup import (AndroidBackupParsingError,
|
||||
parse_tar_for_sms)
|
||||
from mvt.common.module import InsufficientPrivileges
|
||||
from mvt.common.utils import check_for_links, convert_timestamp_to_iso
|
||||
|
||||
from .base import AndroidExtraction
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
SMS_BUGLE_PATH = "data/data/com.google.android.apps.messaging/databases/bugle_db"
|
||||
SMS_BUGLE_QUERY = """
|
||||
SELECT
|
||||
ppl.normalized_destination AS address,
|
||||
p.timestamp AS timestamp,
|
||||
CASE WHEN m.sender_id IN
|
||||
(SELECT _id FROM participants WHERE contact_id=-1)
|
||||
THEN 2 ELSE 1 END incoming, p.text AS body
|
||||
FROM messages m, conversations c, parts p,
|
||||
participants ppl, conversation_participants cp
|
||||
WHERE (m.conversation_id = c._id)
|
||||
AND (m._id = p.message_id)
|
||||
AND (cp.conversation_id = c._id)
|
||||
AND (cp.participant_id = ppl._id);
|
||||
"""
|
||||
|
||||
SMS_MMSSMS_PATH = "data/data/com.android.providers.telephony/databases/mmssms.db"
|
||||
SMS_MMSMS_QUERY = """
|
||||
SELECT
|
||||
address AS address,
|
||||
date_sent AS timestamp,
|
||||
type as incoming,
|
||||
body AS body
|
||||
FROM sms;
|
||||
"""
|
||||
|
||||
|
||||
class SMS(AndroidExtraction):
|
||||
"""This module extracts all SMS messages containing links."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
body = record["body"].replace("\n", "\\n")
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": f"sms_{record['direction']}",
|
||||
"data": f"{record['address']}: \"{body}\""
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if "body" not in message:
|
||||
continue
|
||||
|
||||
# FIXME: check links exported from the body previously
|
||||
message_links = check_for_links(message["body"])
|
||||
if self.indicators.check_domains(message_links):
|
||||
self.detected.append(message)
|
||||
|
||||
def _parse_db(self, db_path):
|
||||
"""Parse an Android bugle_db SMS database file.
|
||||
|
||||
:param db_path: Path to the Android SMS database file to process
|
||||
|
||||
"""
|
||||
conn = sqlite3.connect(db_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
if self.SMS_DB_TYPE == 1:
|
||||
cur.execute(SMS_BUGLE_QUERY)
|
||||
elif self.SMS_DB_TYPE == 2:
|
||||
cur.execute(SMS_MMSMS_QUERY)
|
||||
|
||||
names = [description[0] for description in cur.description]
|
||||
|
||||
for item in cur:
|
||||
message = {}
|
||||
for index, value in enumerate(item):
|
||||
message[names[index]] = value
|
||||
|
||||
message["direction"] = ("received" if message["incoming"] == 1 else "sent")
|
||||
message["isodate"] = convert_timestamp_to_iso(message["timestamp"])
|
||||
|
||||
# If we find links in the messages or if they are empty we add
|
||||
# them to the list of results.
|
||||
if check_for_links(message["body"]) or message["body"].strip() == "":
|
||||
self.results.append(message)
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
log.info("Extracted a total of %d SMS messages containing links", len(self.results))
|
||||
|
||||
def _extract_sms_adb(self):
|
||||
"""Use the Android backup command to extract SMS data from the native SMS app
|
||||
|
||||
It is crucial to use the under-documented "-nocompress" flag to disable the non-standard Java compression
|
||||
algorithm. This module only supports an unencrypted ADB backup.
|
||||
"""
|
||||
backup_tar = self._generate_backup("com.android.providers.telephony")
|
||||
if not backup_tar:
|
||||
return
|
||||
|
||||
try:
|
||||
self.results = parse_tar_for_sms(backup_tar)
|
||||
except AndroidBackupParsingError:
|
||||
self.log.info("Unable to read SMS from the Android backup; please extract the backup and try parsing it with Android Backup Extractor")
|
||||
return
|
||||
|
||||
log.info("Extracted a total of %d SMS messages containing links", len(self.results))
|
||||
|
||||
def run(self):
|
||||
try:
|
||||
if (self._adb_check_file_exists(os.path.join("/", SMS_BUGLE_PATH))):
|
||||
self.SMS_DB_TYPE = 1
|
||||
self._adb_process_file(os.path.join("/", SMS_BUGLE_PATH), self._parse_db)
|
||||
elif (self._adb_check_file_exists(os.path.join("/", SMS_MMSSMS_PATH))):
|
||||
self.SMS_DB_TYPE = 2
|
||||
self._adb_process_file(os.path.join("/", SMS_MMSSMS_PATH), self._parse_db)
|
||||
return
|
||||
except InsufficientPrivileges:
|
||||
pass
|
||||
|
||||
self.log.warn("No SMS database found. Trying extraction of SMS data using Android backup feature.")
|
||||
self._extract_sms_adb()
|
||||
@@ -1,46 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
|
||||
from mvt.common.module import MVTModule
|
||||
|
||||
|
||||
class BackupExtraction(MVTModule):
"""This class provides a base for all backup extraction modules."""
|
||||
ab = None
|
||||
|
||||
def from_folder(self, backup_path, files):
|
||||
"""
|
||||
Get all the files and list them
|
||||
"""
|
||||
self.backup_path = backup_path
|
||||
self.files = files
|
||||
|
||||
def from_ab(self, file_path, tar, files):
|
||||
"""
|
||||
Extract the files
|
||||
"""
|
||||
self.ab = file_path
|
||||
self.tar = tar
|
||||
self.files = files
|
||||
|
||||
def _get_files_by_pattern(self, pattern):
|
||||
return fnmatch.filter(self.files, pattern)
|
||||
|
||||
def _get_file_content(self, file_path):
|
||||
if self.ab:
|
||||
try:
|
||||
member = self.tar.getmember(file_path)
|
||||
except KeyError:
|
||||
return None
|
||||
handle = self.tar.extractfile(member)
|
||||
else:
|
||||
handle = open(os.path.join(self.backup_path, file_path), "rb")
|
||||
|
||||
data = handle.read()
|
||||
handle.close()
|
||||
return data
|
||||
@@ -1,36 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from mvt.android.modules.backup.base import BackupExtraction
|
||||
from mvt.android.parsers.backup import parse_sms_file
|
||||
from mvt.common.utils import check_for_links
|
||||
|
||||
|
||||
class SMS(BackupExtraction):
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
self.results = []
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for message in self.results:
|
||||
if "body" not in message:
|
||||
continue
|
||||
|
||||
if self.indicators.check_domains(message["links"]):
|
||||
self.detected.append(message)
|
||||
|
||||
def run(self):
|
||||
for file in self._get_files_by_pattern("apps/com.android.providers.telephony/d_f/*_sms_backup"):
|
||||
self.log.info("Processing SMS backup file at %s", file)
|
||||
data = self._get_file_content(file)
|
||||
self.results.extend(parse_sms_file(data))
|
||||
self.log.info("Extracted a total of %d SMS messages containing links",
|
||||
len(self.results))
|
||||
@@ -1,16 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from .accessibility import Accessibility
from .activities import Activities
from .battery_daily import BatteryDaily
from .battery_history import BatteryHistory
from .dbinfo import DBInfo
from .getprop import Getprop
from .packages import Packages
from .receivers import Receivers

BUGREPORT_MODULES = [Accessibility, Activities, BatteryDaily, BatteryHistory,
                     DBInfo, Getprop, Packages, Receivers]
@@ -1,60 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_accessibility
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Accessibility(BugReportModule):
|
||||
"""This module extracts stats on accessibility."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_accessibility = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE accessibility:":
|
||||
in_accessibility = True
|
||||
continue
|
||||
|
||||
if not in_accessibility:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_accessibility("\n".join(lines))
|
||||
for result in self.results:
|
||||
log.info("Found installed accessibility service \"%s\"", result.get("service"))
|
||||
|
||||
self.log.info("Identified a total of %d accessibility services", len(self.results))
|
||||
@@ -1,61 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_activity_resolver_table
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Activities(BugReportModule):
"""This module extracts details on activities registered to handle intents."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, activities in self.results.items():
|
||||
for activity in activities:
|
||||
ioc = self.indicators.check_app_id(activity["package_name"])
|
||||
if ioc:
|
||||
activity["matched_indicator"] = ioc
|
||||
self.detected.append({intent: activity})
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_package = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE package:":
|
||||
in_package = True
|
||||
continue
|
||||
|
||||
if not in_package:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_activity_resolver_table("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted activities for %d intents", len(self.results))
|
||||
@@ -1,76 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_daily
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BatteryDaily(BugReportModule):
|
||||
"""This module extracts records from battery daily updates."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["from"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "battery_daily",
|
||||
"data": f"Recorded update of package {record['package_name']} with vers {record['vers']}"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_batterystats = False
|
||||
in_daily = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE batterystats:":
|
||||
in_batterystats = True
|
||||
continue
|
||||
|
||||
if not in_batterystats:
|
||||
continue
|
||||
|
||||
if line.strip() == "Daily stats:":
|
||||
lines.append(line)
|
||||
in_daily = True
|
||||
continue
|
||||
|
||||
if not in_daily:
|
||||
continue
|
||||
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_battery_daily("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted a total of %d battery daily stats",
|
||||
len(self.results))
|
||||
@@ -1,60 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_battery_history
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BatteryHistory(BugReportModule):
"""This module extracts records from battery history events."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_history = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip().startswith("Battery History "):
|
||||
lines.append(line)
|
||||
in_history = True
|
||||
continue
|
||||
|
||||
if not in_history:
|
||||
continue
|
||||
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_battery_history("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted a total of %d battery history records",
|
||||
len(self.results))
|
||||
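For reference, a sketch of the record shape that parse_dumpsys_battery_history() yields and that check_indicators() above inspects; all values are invented, and "indicators" stands for a loaded Indicators instance.

record = {
    "time_elapsed": "+1d04h13m16s744ms",
    "event": "start_job",
    "uid": "u0a123",
    "package_name": "com.example.app",
    "service": "com.example.app/com.example.app.SyncJob",
}
ioc = indicators.check_app_id(record["package_name"])  # as done in check_indicators()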
@@ -1,63 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_dbinfo
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DBInfo(BugReportModule):
    """This module extracts records of recently executed database operations from the dbinfo service."""
|
||||
|
||||
slug = "dbinfo"
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
path = result.get("path", "")
|
||||
for part in path.split("/"):
|
||||
ioc = self.indicators.check_app_id(part)
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
in_dbinfo = False
|
||||
lines = []
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE dbinfo:":
|
||||
in_dbinfo = True
|
||||
continue
|
||||
|
||||
if not in_dbinfo:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_dbinfo("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted a total of %d database connection pool records",
|
||||
len(self.results))
|
||||
@@ -1,59 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from mvt.android.parsers import parse_getprop
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Getprop(BugReportModule):
    """This module extracts device properties from the getprop command output."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = {} if not results else results
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
lines = []
|
||||
in_getprop = False
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "------ SYSTEM PROPERTIES (getprop) ------":
|
||||
in_getprop = True
|
||||
continue
|
||||
|
||||
if not in_getprop:
|
||||
continue
|
||||
|
||||
if line.strip() == "------":
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_getprop("\n".join(lines))
|
||||
|
||||
# Alert if phone is outdated.
|
||||
security_patch = self.results.get("ro.build.version.security_patch", "")
|
||||
if security_patch:
|
||||
patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
|
||||
if (datetime.now() - patch_date) > timedelta(days=6*30):
|
||||
self.log.warning("This phone has not received security updates for more than "
|
||||
"six months (last update: %s)", security_patch)
|
||||
|
||||
self.log.info("Extracted %d Android system properties", len(self.results))
|
||||
@@ -1,117 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import re
|
||||
|
||||
from mvt.android.modules.adb.packages import Packages as PCK
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class Packages(BugReportModule):
    """This module extracts details on installed packages."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
records = []
|
||||
|
||||
timestamps = [
|
||||
{"event": "package_install", "timestamp": record["timestamp"]},
|
||||
{"event": "package_first_install", "timestamp": record["first_install_time"]},
|
||||
{"event": "package_last_update", "timestamp": record["last_update_time"]},
|
||||
]
|
||||
|
||||
for ts in timestamps:
|
||||
records.append({
|
||||
"timestamp": ts["timestamp"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": ts["event"],
|
||||
"data": f"Install or update of package {record['package_name']}",
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_app_id(result["package_name"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
@staticmethod
|
||||
def parse_packages_list(output):
|
||||
pkg_rxp = re.compile(r" Package \[(.+?)\].*")
|
||||
|
||||
results = []
|
||||
package_name = None
|
||||
package = {}
|
||||
lines = []
|
||||
for line in output.splitlines():
|
||||
if line.startswith(" Package ["):
|
||||
if len(lines) > 0:
|
||||
details = PCK.parse_package_for_details("\n".join(lines))
|
||||
package.update(details)
|
||||
results.append(package)
|
||||
package = {}
|
||||
|
||||
matches = pkg_rxp.findall(line)
|
||||
if not matches:
|
||||
continue
|
||||
|
||||
package_name = matches[0]
|
||||
package["package_name"] = package_name
|
||||
continue
|
||||
|
||||
if not package_name:
|
||||
continue
|
||||
|
||||
lines.append(line)
|
||||
|
||||
return results
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
in_package = False
|
||||
in_packages_list = False
|
||||
lines = []
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE package:":
|
||||
in_package = True
|
||||
continue
|
||||
|
||||
if not in_package:
|
||||
continue
|
||||
|
||||
if line.strip() == "Packages:":
|
||||
in_packages_list = True
|
||||
continue
|
||||
|
||||
if not in_packages_list:
|
||||
continue
|
||||
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = self.parse_packages_list("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted details on %d packages", len(self.results))
|
||||
@@ -1,83 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
|
||||
from mvt.android.parsers import parse_dumpsys_receiver_resolver_table
|
||||
|
||||
from .base import BugReportModule
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
INTENT_NEW_OUTGOING_SMS = "android.provider.Telephony.NEW_OUTGOING_SMS"
|
||||
INTENT_SMS_RECEIVED = "android.provider.Telephony.SMS_RECEIVED"
|
||||
INTENT_DATA_SMS_RECEIVED = "android.intent.action.DATA_SMS_RECEIVED"
|
||||
INTENT_PHONE_STATE = "android.intent.action.PHONE_STATE"
|
||||
INTENT_NEW_OUTGOING_CALL = "android.intent.action.NEW_OUTGOING_CALL"
|
||||
|
||||
|
||||
class Receivers(BugReportModule):
|
||||
"""This module extracts details on receivers for risky activities."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
serial=None, fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = results if results else {}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for intent, receivers in self.results.items():
|
||||
for receiver in receivers:
|
||||
if intent == INTENT_NEW_OUTGOING_SMS:
|
||||
self.log.info("Found a receiver to intercept outgoing SMS messages: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming SMS messages: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_DATA_SMS_RECEIVED:
|
||||
self.log.info("Found a receiver to intercept incoming data SMS message: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_PHONE_STATE:
|
||||
self.log.info("Found a receiver monitoring telephony state/incoming calls: \"%s\"",
|
||||
receiver["receiver"])
|
||||
elif intent == INTENT_NEW_OUTGOING_CALL:
|
||||
self.log.info("Found a receiver monitoring outgoing calls: \"%s\"",
|
||||
receiver["receiver"])
|
||||
|
||||
ioc = self.indicators.check_app_id(receiver["package_name"])
|
||||
if ioc:
|
||||
receiver["matched_indicator"] = ioc
|
||||
self.detected.append({intent: receiver})
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
content = self._get_dumpstate_file()
|
||||
if not content:
|
||||
self.log.error("Unable to find dumpstate file. Did you provide a valid bug report archive?")
|
||||
return
|
||||
|
||||
in_receivers = False
|
||||
lines = []
|
||||
for line in content.decode(errors="ignore").splitlines():
|
||||
if line.strip() == "DUMP OF SERVICE package:":
|
||||
in_receivers = True
|
||||
continue
|
||||
|
||||
if not in_receivers:
|
||||
continue
|
||||
|
||||
if line.strip().startswith("------------------------------------------------------------------------------"):
|
||||
break
|
||||
|
||||
lines.append(line)
|
||||
|
||||
self.results = parse_dumpsys_receiver_resolver_table("\n".join(lines))
|
||||
|
||||
self.log.info("Extracted receivers for %d intents", len(self.results))
|
||||
@@ -1,11 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from .dumpsys import (parse_dumpsys_accessibility,
                      parse_dumpsys_activity_resolver_table,
                      parse_dumpsys_battery_daily,
                      parse_dumpsys_battery_history, parse_dumpsys_dbinfo,
                      parse_dumpsys_receiver_resolver_table)
from .getprop import parse_getprop
@@ -1,287 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import re
|
||||
|
||||
|
||||
def parse_dumpsys_accessibility(output):
|
||||
results = []
|
||||
|
||||
in_services = False
|
||||
for line in output.splitlines():
|
||||
if line.strip().startswith("installed services:"):
|
||||
in_services = True
|
||||
continue
|
||||
|
||||
if not in_services:
|
||||
continue
|
||||
|
||||
if line.strip() == "}":
|
||||
break
|
||||
|
||||
service = line.split(":")[1].strip()
|
||||
|
||||
results.append({
|
||||
"package_name": service.split("/")[0],
|
||||
"service": service,
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def parse_dumpsys_activity_resolver_table(output):
|
||||
results = {}
|
||||
|
||||
in_activity_resolver_table = False
|
||||
in_non_data_actions = False
|
||||
intent = None
|
||||
for line in output.splitlines():
|
||||
if line.startswith("Activity Resolver Table:"):
|
||||
in_activity_resolver_table = True
|
||||
continue
|
||||
|
||||
if not in_activity_resolver_table:
|
||||
continue
|
||||
|
||||
if line.startswith(" Non-Data Actions:"):
|
||||
in_non_data_actions = True
|
||||
continue
|
||||
|
||||
if not in_non_data_actions:
|
||||
continue
|
||||
|
||||
# If we hit an empty line, the Non-Data Actions section should be
|
||||
# finished.
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
# We detect the action name.
|
||||
if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line:
|
||||
intent = line.strip().replace(":", "")
|
||||
results[intent] = []
|
||||
continue
|
||||
|
||||
# If we are not in an intent block yet, skip.
|
||||
if not intent:
|
||||
continue
|
||||
|
||||
# If we are in a block but the line does not start with 8 spaces
|
||||
# it means the block ended and a new one started, so we reset and
|
||||
# continue.
|
||||
if not line.startswith(" " * 8):
|
||||
intent = None
|
||||
continue
|
||||
|
||||
# If we got this far, we are processing receivers for the
|
||||
# activities we are interested in.
|
||||
activity = line.strip().split(" ")[1]
|
||||
package_name = activity.split("/")[0]
|
||||
|
||||
results[intent].append({
|
||||
"package_name": package_name,
|
||||
"activity": activity,
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def parse_dumpsys_battery_daily(output):
|
||||
results = []
|
||||
daily = None
|
||||
daily_updates = []
|
||||
for line in output.splitlines():
|
||||
if line.startswith(" Daily from "):
|
||||
if len(daily_updates) > 0:
|
||||
results.extend(daily_updates)
|
||||
daily_updates = []
|
||||
|
||||
timeframe = line[13:].strip()
|
||||
date_from, date_to = timeframe.strip(":").split(" to ", 1)
|
||||
daily = {"from": date_from[0:10], "to": date_to[0:10]}
|
||||
continue
|
||||
|
||||
if not daily:
|
||||
continue
|
||||
|
||||
if not line.strip().startswith("Update "):
|
||||
continue
|
||||
|
||||
line = line.strip().replace("Update ", "")
|
||||
package_name, vers = line.split(" ", 1)
|
||||
vers_nr = vers.split("=", 1)[1]
|
||||
|
||||
already_seen = False
|
||||
for update in daily_updates:
|
||||
if package_name == update["package_name"] and vers_nr == update["vers"]:
|
||||
already_seen = True
|
||||
break
|
||||
|
||||
if not already_seen:
|
||||
daily_updates.append({
|
||||
"action": "update",
|
||||
"from": daily["from"],
|
||||
"to": daily["to"],
|
||||
"package_name": package_name,
|
||||
"vers": vers_nr,
|
||||
})
|
||||
|
||||
if len(daily_updates) > 0:
|
||||
results.extend(daily_updates)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def parse_dumpsys_battery_history(output):
|
||||
results = []
|
||||
|
||||
for line in output.splitlines():
|
||||
if line.startswith("Battery History "):
|
||||
continue
|
||||
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
time_elapsed = line.strip().split(" ", 1)[0]
|
||||
|
||||
event = ""
|
||||
if line.find("+job") > 0:
|
||||
event = "start_job"
|
||||
uid = line[line.find("+job")+5:line.find(":")]
|
||||
service = line[line.find(":")+1:].strip('"')
|
||||
package_name = service.split("/")[0]
|
||||
elif line.find("-job") > 0:
|
||||
event = "end_job"
|
||||
uid = line[line.find("-job")+5:line.find(":")]
|
||||
service = line[line.find(":")+1:].strip('"')
|
||||
package_name = service.split("/")[0]
|
||||
elif line.find("+running +wake_lock=") > 0:
|
||||
uid = line[line.find("+running +wake_lock=")+21:line.find(":")]
|
||||
event = "wake"
|
||||
service = line[line.find("*walarm*:")+9:].split(" ")[0].strip('"').strip()
|
||||
if service == "" or "/" not in service:
|
||||
continue
|
||||
|
||||
package_name = service.split("/")[0]
|
||||
else:
|
||||
continue
|
||||
|
||||
results.append({
|
||||
"time_elapsed": time_elapsed,
|
||||
"event": event,
|
||||
"uid": uid,
|
||||
"package_name": package_name,
|
||||
"service": service,
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def parse_dumpsys_dbinfo(output):
|
||||
results = []
|
||||
|
||||
rxp = re.compile(r'.*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\].*\[Pid:\((\d+)\)\](\w+).*sql\=\"(.+?)\"')
|
||||
rxp_no_pid = re.compile(r'.*\[([0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{3})\][ ]{1}(\w+).*sql\=\"(.+?)\"')
|
||||
|
||||
pool = None
|
||||
in_operations = False
|
||||
for line in output.splitlines():
|
||||
if line.startswith("Connection pool for "):
|
||||
pool = line.replace("Connection pool for ", "").rstrip(":")
|
||||
|
||||
if not pool:
|
||||
continue
|
||||
|
||||
if line.strip() == "Most recently executed operations:":
|
||||
in_operations = True
|
||||
continue
|
||||
|
||||
if not in_operations:
|
||||
continue
|
||||
|
||||
if not line.startswith(" "):
|
||||
in_operations = False
|
||||
pool = None
|
||||
continue
|
||||
|
||||
matches = rxp.findall(line)
|
||||
if not matches:
|
||||
matches = rxp_no_pid.findall(line)
|
||||
if not matches:
|
||||
continue
|
||||
else:
|
||||
match = matches[0]
|
||||
results.append({
|
||||
"isodate": match[0],
|
||||
"action": match[1],
|
||||
"sql": match[2],
|
||||
"path": pool,
|
||||
})
|
||||
else:
|
||||
match = matches[0]
|
||||
results.append({
|
||||
"isodate": match[0],
|
||||
"pid": match[1],
|
||||
"action": match[2],
|
||||
"sql": match[3],
|
||||
"path": pool,
|
||||
})
|
||||
|
||||
return results
|
||||
|
||||
|
||||
def parse_dumpsys_receiver_resolver_table(output):
|
||||
results = {}
|
||||
|
||||
in_receiver_resolver_table = False
|
||||
in_non_data_actions = False
|
||||
intent = None
|
||||
for line in output.splitlines():
|
||||
if line.startswith("Receiver Resolver Table:"):
|
||||
in_receiver_resolver_table = True
|
||||
continue
|
||||
|
||||
if not in_receiver_resolver_table:
|
||||
continue
|
||||
|
||||
if line.startswith(" Non-Data Actions:"):
|
||||
in_non_data_actions = True
|
||||
continue
|
||||
|
||||
if not in_non_data_actions:
|
||||
continue
|
||||
|
||||
# If we hit an empty line, the Non-Data Actions section should be
|
||||
# finished.
|
||||
if line.strip() == "":
|
||||
break
|
||||
|
||||
# We detect the action name.
|
||||
if line.startswith(" " * 6) and not line.startswith(" " * 8) and ":" in line:
|
||||
intent = line.strip().replace(":", "")
|
||||
results[intent] = []
|
||||
continue
|
||||
|
||||
# If we are not in an intent block yet, skip.
|
||||
if not intent:
|
||||
continue
|
||||
|
||||
# If we are in a block but the line does not start with 8 spaces
|
||||
# it means the block ended and a new one started, so we reset and
|
||||
# continue.
|
||||
if not line.startswith(" " * 8):
|
||||
intent = None
|
||||
continue
|
||||
|
||||
# If we got this far, we are processing receivers for the
|
||||
# activities we are interested in.
|
||||
receiver = line.strip().split(" ")[1]
|
||||
package_name = receiver.split("/")[0]
|
||||
|
||||
results[intent].append({
|
||||
"package_name": package_name,
|
||||
"receiver": receiver,
|
||||
})
|
||||
|
||||
return results
|
||||
@@ -1,26 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import re


def parse_getprop(output):
    results = {}
    rxp = re.compile(r"\[(.+?)\]: \[(.+?)\]")

    for line in output.splitlines():
        line = line.strip()
        if line == "":
            continue

        matches = re.findall(rxp, line)
        if not matches or len(matches[0]) != 2:
            continue

        key = matches[0][0]
        value = matches[0][1]
        results[key] = value

    return results
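To illustrate the output shape, a short sketch of parse_getprop() on two invented getprop lines: the bracketed key/value rows become a plain dictionary.

sample = "[ro.build.version.security_patch]: [2021-10-01]\n[ro.product.model]: [Pixel 4a]"
props = parse_getprop(sample)
# props == {"ro.build.version.security_patch": "2021-10-01", "ro.product.model": "Pixel 4a"}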
@@ -1,14 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

# Help messages of repeating options.
HELP_MSG_OUTPUT = "Specify a path to a folder where you want to store JSON results"
HELP_MSG_IOC = "Path to indicators file (can be invoked multiple times)"
HELP_MSG_FAST = "Avoid running time/resource consuming features"
HELP_MSG_LIST_MODULES = "Print list of available modules and exit"
HELP_MSG_MODULE = "Name of a single module you would like to run instead of all"

# Android-specific.
HELP_MSG_SERIAL = "Specify a device serial number or HOST:PORT connection string"
@@ -1,480 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import requests
|
||||
from appdirs import user_data_dir
|
||||
|
||||
from .url import URL
|
||||
|
||||
|
||||
class Indicators:
|
||||
"""This class is used to parse indicators from a STIX2 file and provide
|
||||
functions to compare extracted artifacts to the indicators.
|
||||
"""
|
||||
|
||||
def __init__(self, log=None):
|
||||
self.data_dir = user_data_dir("mvt")
|
||||
self.log = log
|
||||
self.ioc_collections = []
|
||||
self.total_ioc_count = 0
|
||||
|
||||
def _load_downloaded_indicators(self):
|
||||
if not os.path.isdir(self.data_dir):
|
||||
return
|
||||
|
||||
for f in os.listdir(self.data_dir):
|
||||
if f.lower().endswith(".stix2"):
|
||||
self.parse_stix2(os.path.join(self.data_dir, f))
|
||||
|
||||
def _check_stix2_env_variable(self):
|
||||
"""
|
||||
Checks if the MVT_STIX2 environment variable contains a path to one or more STIX2 files.
|
||||
"""
|
||||
if "MVT_STIX2" not in os.environ:
|
||||
return
|
||||
|
||||
paths = os.environ["MVT_STIX2"].split(":")
|
||||
for path in paths:
|
||||
if os.path.isfile(path):
|
||||
self.parse_stix2(path)
|
||||
else:
|
||||
self.log.error("Path specified with env MVT_STIX2 is not a valid file: %s",
|
||||
path)
|
||||
|
||||
def _new_collection(self, cid="", name="", description="", file_name="",
|
||||
file_path=""):
|
||||
return {
|
||||
"id": cid,
|
||||
"name": name,
|
||||
"description": description,
|
||||
"stix2_file_name": file_name,
|
||||
"stix2_file_path": file_path,
|
||||
"domains": [],
|
||||
"processes": [],
|
||||
"emails": [],
|
||||
"file_names": [],
|
||||
"file_paths": [],
|
||||
"files_sha256": [],
|
||||
"app_ids": [],
|
||||
"ios_profile_ids": [],
|
||||
"count": 0,
|
||||
}
|
||||
|
||||
def _add_indicator(self, ioc, ioc_coll, ioc_coll_list):
|
||||
ioc = ioc.strip("'")
|
||||
if ioc not in ioc_coll_list:
|
||||
ioc_coll_list.append(ioc)
|
||||
ioc_coll["count"] += 1
|
||||
self.total_ioc_count += 1
|
||||
|
||||
def parse_stix2(self, file_path):
|
||||
"""Extract indicators from a STIX2 file.
|
||||
|
||||
:param file_path: Path to the STIX2 file to parse
|
||||
:type file_path: str
|
||||
|
||||
"""
|
||||
self.log.info("Parsing STIX2 indicators file at path %s", file_path)
|
||||
|
||||
with open(file_path, "r", encoding="utf-8") as handle:
|
||||
try:
|
||||
data = json.load(handle)
|
||||
except json.decoder.JSONDecodeError:
|
||||
self.log.critical("Unable to parse STIX2 indicator file. "
|
||||
"The file is corrupted or in the wrong format!")
|
||||
return
|
||||
|
||||
malware = {}
|
||||
indicators = []
|
||||
relationships = []
|
||||
for entry in data.get("objects", []):
|
||||
entry_type = entry.get("type", "")
|
||||
if entry_type == "malware":
|
||||
malware[entry["id"]] = {
|
||||
"name": entry["name"],
|
||||
"description": entry["description"],
|
||||
}
|
||||
elif entry_type == "indicator":
|
||||
indicators.append(entry)
|
||||
elif entry_type == "relationship":
|
||||
relationships.append(entry)
|
||||
|
||||
collections = []
|
||||
for mal_id, mal_values in malware.items():
|
||||
collection = self._new_collection(mal_id, mal_values.get("name"),
|
||||
mal_values.get("description"),
|
||||
os.path.basename(file_path),
|
||||
file_path)
|
||||
collections.append(collection)
|
||||
|
||||
# We loop through all indicators.
|
||||
for indicator in indicators:
|
||||
malware_id = None
|
||||
|
||||
# We loop through all relationships and find the one pertinent to
|
||||
# the current indicator.
|
||||
for relationship in relationships:
|
||||
if relationship["source_ref"] != indicator["id"]:
|
||||
continue
|
||||
|
||||
# Look for a malware definition with the correct identifier.
|
||||
if relationship["target_ref"] in malware.keys():
|
||||
malware_id = relationship["target_ref"]
|
||||
break
|
||||
|
||||
# Now we look for the correct collection matching the malware ID we
|
||||
# got from the relationship.
|
||||
for collection in collections:
|
||||
if collection["id"] != malware_id:
|
||||
continue
|
||||
|
||||
key, value = indicator.get("pattern", "").strip("[]").split("=")
|
||||
|
||||
if key == "domain-name:value":
|
||||
# We force domain names to lower case.
|
||||
self._add_indicator(ioc=value.lower(),
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["domains"])
|
||||
elif key == "process:name":
|
||||
self._add_indicator(ioc=value,
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["processes"])
|
||||
elif key == "email-addr:value":
|
||||
# We force email addresses to lower case.
|
||||
self._add_indicator(ioc=value.lower(),
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["emails"])
|
||||
elif key == "file:name":
|
||||
self._add_indicator(ioc=value,
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["file_names"])
|
||||
elif key == "file:path":
|
||||
self._add_indicator(ioc=value,
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["file_paths"])
|
||||
elif key == "file:hashes.sha256":
|
||||
self._add_indicator(ioc=value,
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["files_sha256"])
|
||||
elif key == "app:id":
|
||||
self._add_indicator(ioc=value,
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["app_ids"])
|
||||
elif key == "configuration-profile:id":
|
||||
self._add_indicator(ioc=value,
|
||||
ioc_coll=collection,
|
||||
ioc_coll_list=collection["ios_profile_ids"])
|
||||
|
||||
break
|
||||
|
||||
for coll in collections:
|
||||
self.log.info("Extracted %d indicators for collection with name \"%s\"",
|
||||
coll["count"], coll["name"])
|
||||
|
||||
self.ioc_collections.extend(collections)
|
||||
|
||||
def load_indicators_files(self, files, load_default=True):
|
||||
"""
|
||||
Load a list of indicators files.
|
||||
"""
|
||||
for file_path in files:
|
||||
if os.path.isfile(file_path):
|
||||
self.parse_stix2(file_path)
|
||||
else:
|
||||
self.log.warning("No indicators file exists at path %s",
|
||||
file_path)
|
||||
|
||||
# Load downloaded indicators and any indicators from env variable.
|
||||
if load_default:
|
||||
self._load_downloaded_indicators()
|
||||
|
||||
self._check_stix2_env_variable()
|
||||
self.log.info("Loaded a total of %d unique indicators", self.total_ioc_count)
|
||||
|
||||
def get_iocs(self, ioc_type):
|
||||
for ioc_collection in self.ioc_collections:
|
||||
for ioc in ioc_collection.get(ioc_type, []):
|
||||
yield {
|
||||
"value": ioc,
|
||||
"type": ioc_type,
|
||||
"name": ioc_collection["name"],
|
||||
"stix2_file_name": ioc_collection["stix2_file_name"],
|
||||
}
|
||||
|
||||
def check_domain(self, url):
|
||||
"""Check if a given URL matches any of the provided domain indicators.
|
||||
|
||||
:param url: URL to match against domain indicators
|
||||
:type url: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
# TODO: If the IOC domain contains a subdomain, it is not currently
|
||||
# being matched.
|
||||
if not url:
|
||||
return None
|
||||
|
||||
try:
|
||||
# First we use the provided URL.
|
||||
orig_url = URL(url)
|
||||
|
||||
if orig_url.check_if_shortened():
|
||||
# If it is, we try to retrieve the actual URL making an
|
||||
# HTTP HEAD request.
|
||||
unshortened = orig_url.unshorten()
|
||||
|
||||
# self.log.info("Found a shortened URL %s -> %s",
|
||||
# url, unshortened)
|
||||
|
||||
# Now we check for any nested URL shorteners.
|
||||
dest_url = URL(unshortened)
|
||||
if dest_url.check_if_shortened():
|
||||
# self.log.info("Original URL %s appears to shorten another shortened URL %s ... checking!",
|
||||
# orig_url.url, dest_url.url)
|
||||
return self.check_domain(dest_url.url)
|
||||
|
||||
final_url = dest_url
|
||||
else:
|
||||
# If it's not shortened, we just use the original URL object.
|
||||
final_url = orig_url
|
||||
except Exception:
|
||||
# If URL parsing failed, we just try to do a simple substring
|
||||
# match.
|
||||
for ioc in self.get_iocs("domains"):
|
||||
if ioc["value"].lower() in url:
|
||||
self.log.warning("Maybe found a known suspicious domain %s matching indicators from \"%s\"",
|
||||
url, ioc["name"])
|
||||
return ioc
|
||||
|
||||
# If nothing matched, we can quit here.
|
||||
return None
|
||||
|
||||
# If all parsing worked, we start walking through available domain indicators.
|
||||
for ioc in self.get_iocs("domains"):
|
||||
# First we check the full domain.
|
||||
if final_url.domain.lower() == ioc["value"]:
|
||||
if orig_url.is_shortened and orig_url.url != final_url.url:
|
||||
self.log.warning("Found a known suspicious domain %s shortened as %s matching indicators from \"%s\"",
|
||||
final_url.url, orig_url.url, ioc["name"])
|
||||
else:
|
||||
self.log.warning("Found a known suspicious domain %s matching indicators from \"%s\"",
|
||||
final_url.url, ioc["name"])
|
||||
|
||||
return ioc
|
||||
|
||||
# Then we just check the top level domain.
|
||||
if final_url.top_level.lower() == ioc["value"]:
|
||||
if orig_url.is_shortened and orig_url.url != final_url.url:
|
||||
self.log.warning("Found a sub-domain with suspicious top level %s shortened as %s matching indicators from \"%s\"",
|
||||
final_url.url, orig_url.url, ioc["name"])
|
||||
else:
|
||||
self.log.warning("Found a sub-domain with a suspicious top level %s matching indicators from \"%s\"",
|
||||
final_url.url, ioc["name"])
|
||||
|
||||
return ioc
|
||||
|
||||
def check_domains(self, urls):
|
||||
"""Check a list of URLs against the provided list of domain indicators.
|
||||
|
||||
:param urls: List of URLs to check against domain indicators
|
||||
:type urls: list
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not urls:
|
||||
return None
|
||||
|
||||
for url in urls:
|
||||
check = self.check_domain(url)
|
||||
if check:
|
||||
return check
|
||||
|
||||
def check_process(self, process):
|
||||
"""Check the provided process name against the list of process
|
||||
indicators.
|
||||
|
||||
:param process: Process name to check against process indicators
|
||||
:type process: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not process:
|
||||
return None
|
||||
|
||||
proc_name = os.path.basename(process)
|
||||
for ioc in self.get_iocs("processes"):
|
||||
if proc_name == ioc["value"]:
|
||||
self.log.warning("Found a known suspicious process name \"%s\" matching indicators from \"%s\"",
|
||||
process, ioc["name"])
|
||||
return ioc
|
||||
|
||||
if len(proc_name) == 16:
|
||||
if ioc["value"].startswith(proc_name):
|
||||
self.log.warning("Found a truncated known suspicious process name \"%s\" matching indicators from \"%s\"",
|
||||
process, ioc["name"])
|
||||
return ioc
|
||||
|
||||
def check_processes(self, processes):
|
||||
"""Check the provided list of processes against the list of
|
||||
process indicators.
|
||||
|
||||
:param processes: List of processes to check against process indicators
|
||||
:type processes: list
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not processes:
|
||||
return None
|
||||
|
||||
for process in processes:
|
||||
check = self.check_process(process)
|
||||
if check:
|
||||
return check
|
||||
|
||||
def check_email(self, email):
|
||||
"""Check the provided email against the list of email indicators.
|
||||
|
||||
:param email: Email address to check against email indicators
|
||||
:type email: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not email:
|
||||
return None
|
||||
|
||||
for ioc in self.get_iocs("emails"):
|
||||
if email.lower() == ioc["value"].lower():
|
||||
self.log.warning("Found a known suspicious email address \"%s\" matching indicators from \"%s\"",
|
||||
email, ioc["name"])
|
||||
return ioc
|
||||
|
||||
def check_file_name(self, file_name):
|
||||
"""Check the provided file name against the list of file indicators.
|
||||
|
||||
:param file_name: File name to check against file
|
||||
indicators
|
||||
:type file_name: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not file_name:
|
||||
return None
|
||||
|
||||
for ioc in self.get_iocs("file_names"):
|
||||
if ioc["value"] == file_name:
|
||||
self.log.warning("Found a known suspicious file name \"%s\" matching indicators from \"%s\"",
|
||||
file_name, ioc["name"])
|
||||
return ioc
|
||||
|
||||
def check_file_path(self, file_path):
|
||||
"""Check the provided file path against the list of file indicators (both path and name).
|
||||
|
||||
:param file_path: File path or file name to check against file
|
||||
indicators
|
||||
:type file_path: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not file_path:
|
||||
return None
|
||||
|
||||
ioc = self.check_file_name(os.path.basename(file_path))
|
||||
if ioc:
|
||||
return ioc
|
||||
|
||||
for ioc in self.get_iocs("file_paths"):
|
||||
# Strip any trailing slash from indicator paths to match directories.
|
||||
if file_path.startswith(ioc["value"].rstrip("/")):
|
||||
self.log.warning("Found a known suspicious file path \"%s\" matching indicators from \"%s\"",
|
||||
file_path, ioc["name"])
|
||||
return ioc
|
||||
|
||||
def check_profile(self, profile_uuid):
|
||||
"""Check the provided configuration profile UUID against the list of indicators.
|
||||
|
||||
:param profile_uuid: Profile UUID to check against configuration profile indicators
|
||||
:type profile_uuid: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not profile_uuid:
|
||||
return None
|
||||
|
||||
for ioc in self.get_iocs("ios_profile_ids"):
|
||||
if profile_uuid in ioc["value"]:
|
||||
self.log.warning("Found a known suspicious profile ID \"%s\" matching indicators from \"%s\"",
|
||||
profile_uuid, ioc["name"])
|
||||
return ioc
|
||||
|
||||
def check_file_hash(self, file_hash):
|
||||
"""Check the provided SHA256 file hash against the list of indicators.
|
||||
|
||||
:param file_hash: SHA256 hash to check
|
||||
:type file_hash: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not file_hash:
|
||||
return None
|
||||
|
||||
for ioc in self.get_iocs("files_sha256"):
|
||||
if file_hash.lower() == ioc["value"].lower():
|
||||
self.log.warning("Found a known suspicious file with hash \"%s\" matching indicators from \"%s\"",
|
||||
file_hash, ioc["name"])
|
||||
return ioc
|
||||
|
||||
def check_app_id(self, app_id):
|
||||
"""Check the provided app identifier (typically an Android package name)
|
||||
against the list of indicators.
|
||||
|
||||
:param app_id: App ID to check against the list of indicators
|
||||
:type app_id: str
|
||||
:returns: Indicator details if matched, otherwise None
|
||||
|
||||
"""
|
||||
if not app_id:
|
||||
return None
|
||||
|
||||
for ioc in self.get_iocs("app_ids"):
|
||||
if app_id.lower() == ioc["value"].lower():
|
||||
self.log.warning("Found a known suspicious app with ID \"%s\" matching indicators from \"%s\"",
|
||||
app_id, ioc["name"])
|
||||
return ioc
|
||||
|
||||
|
||||
def download_indicators_files(log):
|
||||
"""
|
||||
Download indicators from repo into MVT app data directory.
|
||||
"""
|
||||
data_dir = user_data_dir("mvt")
|
||||
if not os.path.isdir(data_dir):
|
||||
os.makedirs(data_dir, exist_ok=True)
|
||||
|
||||
# Download latest list of indicators from the MVT repo.
|
||||
res = requests.get("https://github.com/mvt-project/mvt/raw/main/public_indicators.json")
|
||||
if res.status_code != 200:
|
||||
log.warning("Unable to retrieve the list of indicators from the MVT repository.")
|
||||
return
|
||||
|
||||
for ioc_entry in res.json():
|
||||
ioc_url = ioc_entry["stix2_url"]
|
||||
log.info("Downloading indicator file %s from %s", ioc_entry["name"], ioc_url)
|
||||
|
||||
res = requests.get(ioc_url)
|
||||
if res.status_code != 200:
|
||||
log.warning("Could not find indicator file %s", ioc_url)
|
||||
continue
|
||||
|
||||
clean_file_name = ioc_url.lstrip("https://").replace("/", "_")
|
||||
ioc_path = os.path.join(data_dir, clean_file_name)
|
||||
|
||||
# Write file to disk. This will overwrite any older version of the STIX2 file.
|
||||
with open(ioc_path, "w", encoding="utf-8") as handle:
|
||||
handle.write(res.text)
|
||||
|
||||
log.info("Saved indicator file to %s", os.path.basename(ioc_path))
|
||||
@@ -1,25 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

from rich import print

from .version import MVT_VERSION, check_for_updates


def logo():
    print("\n")
    print("\t[bold]MVT[/bold] - Mobile Verification Toolkit")
    print("\t\thttps://mvt.re")
    print(f"\t\tVersion: {MVT_VERSION}")

    try:
        latest_version = check_for_updates()
    except Exception:
        pass
    else:
        if latest_version:
            print(f"\t\t[bold]Version {latest_version} is available! Upgrade mvt![/bold]")

    print("\n")
@@ -1,203 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import csv
|
||||
import os
|
||||
import re
|
||||
|
||||
import simplejson as json
|
||||
|
||||
|
||||
class DatabaseNotFoundError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DatabaseCorruptedError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InsufficientPrivileges(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class MVTModule(object):
|
||||
"""This class provides a base for all extraction modules."""
|
||||
|
||||
enabled = True
|
||||
slug = None
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=None):
|
||||
"""Initialize module.
|
||||
|
||||
:param file_path: Path to the module's database file, if there is any
|
||||
:type file_path: str
|
||||
:param base_folder: Path to the base folder (backup or filesystem dump)
|
||||
:type file_path: str
|
||||
:param output_folder: Folder where results will be stored
|
||||
:type output_folder: str
|
||||
:param fast_mode: Flag to enable or disable slow modules
|
||||
:type fast_mode: bool
|
||||
:param log: Handle to logger
|
||||
:param results: Provided list of results entries
|
||||
:type results: list
|
||||
"""
|
||||
self.file_path = file_path
|
||||
self.base_folder = base_folder
|
||||
self.output_folder = output_folder
|
||||
self.fast_mode = fast_mode
|
||||
self.log = log
|
||||
self.indicators = None
|
||||
self.results = results if results else []
|
||||
self.detected = []
|
||||
self.timeline = []
|
||||
self.timeline_detected = []
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_path, log=None):
|
||||
with open(json_path, "r", encoding="utf-8") as handle:
|
||||
results = json.load(handle)
|
||||
if log:
|
||||
log.info("Loaded %d results from \"%s\"",
|
||||
len(results), json_path)
|
||||
return cls(results=results, log=log)
|
||||
|
||||
def get_slug(self):
|
||||
"""Use the module's class name to retrieve a slug"""
|
||||
if self.slug:
|
||||
return self.slug
|
||||
|
||||
sub = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", self.__class__.__name__)
|
||||
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", sub).lower()
|
||||
|
||||
def check_indicators(self):
|
||||
"""Check the results of this module against a provided list of
|
||||
indicators.
|
||||
|
||||
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def save_to_json(self):
|
||||
"""Save the collected results to a json file."""
|
||||
if not self.output_folder:
|
||||
return
|
||||
|
||||
name = self.get_slug()
|
||||
|
||||
if self.results:
|
||||
results_file_name = f"{name}.json"
|
||||
results_json_path = os.path.join(self.output_folder, results_file_name)
|
||||
with open(results_json_path, "w", encoding="utf-8") as handle:
|
||||
try:
|
||||
json.dump(self.results, handle, indent=4, default=str)
|
||||
except Exception as e:
|
||||
self.log.error("Unable to store results of module %s to file %s: %s",
|
||||
self.__class__.__name__, results_file_name, e)
|
||||
|
||||
if self.detected:
|
||||
detected_file_name = f"{name}_detected.json"
|
||||
detected_json_path = os.path.join(self.output_folder, detected_file_name)
|
||||
with open(detected_json_path, "w", encoding="utf-8") as handle:
|
||||
json.dump(self.detected, handle, indent=4, default=str)
|
||||
|
||||
def serialize(self, record):
|
||||
raise NotImplementedError
|
||||
|
||||
@staticmethod
|
||||
def _deduplicate_timeline(timeline):
|
||||
"""Serialize entry as JSON to deduplicate repeated entries
|
||||
|
||||
:param timeline: List of entries from timeline to deduplicate
|
||||
|
||||
"""
|
||||
timeline_set = set()
|
||||
for record in timeline:
|
||||
timeline_set.add(json.dumps(record, sort_keys=True))
|
||||
return [json.loads(record) for record in timeline_set]
|
||||
|
||||
def to_timeline(self):
|
||||
"""Convert results into a timeline."""
|
||||
for result in self.results:
|
||||
record = self.serialize(result)
|
||||
if record:
|
||||
if type(record) == list:
|
||||
self.timeline.extend(record)
|
||||
else:
|
||||
self.timeline.append(record)
|
||||
|
||||
for detected in self.detected:
|
||||
record = self.serialize(detected)
|
||||
if record:
|
||||
if type(record) == list:
|
||||
self.timeline_detected.extend(record)
|
||||
else:
|
||||
self.timeline_detected.append(record)
|
||||
|
||||
# De-duplicate timeline entries.
|
||||
self.timeline = self._deduplicate_timeline(self.timeline)
|
||||
self.timeline_detected = self._deduplicate_timeline(self.timeline_detected)
|
||||
|
||||
def run(self):
|
||||
"""Run the main module procedure."""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
def run_module(module):
|
||||
module.log.info("Running module %s...", module.__class__.__name__)
|
||||
|
||||
try:
|
||||
module.run()
|
||||
except NotImplementedError:
|
||||
module.log.exception("The run() procedure of module %s has not been implemented yet!",
|
||||
module.__class__.__name__)
|
||||
except InsufficientPrivileges as e:
|
||||
module.log.info("Insufficient privileges for module %s: %s", module.__class__.__name__, e)
|
||||
except DatabaseNotFoundError as e:
|
||||
module.log.info("There might be no data to extract by module %s: %s",
|
||||
module.__class__.__name__, e)
|
||||
except DatabaseCorruptedError as e:
|
||||
module.log.error("The %s module database seems to be corrupted: %s",
|
||||
module.__class__.__name__, e)
|
||||
except Exception as e:
|
||||
module.log.exception("Error in running extraction from module %s: %s",
|
||||
module.__class__.__name__, e)
|
||||
else:
|
||||
try:
|
||||
module.check_indicators()
|
||||
except NotImplementedError:
|
||||
module.log.info("The %s module does not support checking for indicators",
|
||||
module.__class__.__name__)
|
||||
pass
|
||||
else:
|
||||
if module.indicators and not module.detected:
|
||||
module.log.info("The %s module produced no detections!",
|
||||
module.__class__.__name__)
|
||||
|
||||
try:
|
||||
module.to_timeline()
|
||||
except NotImplementedError:
|
||||
pass
|
||||
|
||||
module.save_to_json()
|
||||
|
||||
|
||||
def save_timeline(timeline, timeline_path):
|
||||
"""Save the timeline in a csv file.
|
||||
|
||||
:param timeline: List of records to order and store
|
||||
:param timeline_path: Path to the csv file to store the timeline to
|
||||
|
||||
"""
|
||||
with open(timeline_path, "a+", encoding="utf-8") as handle:
|
||||
csvoutput = csv.writer(handle, delimiter=",", quotechar="\"")
|
||||
csvoutput.writerow(["UTC Timestamp", "Plugin", "Event", "Description"])
|
||||
for event in sorted(timeline, key=lambda x: x["timestamp"] if x["timestamp"] is not None else ""):
|
||||
csvoutput.writerow([
|
||||
event.get("timestamp"),
|
||||
event.get("module"),
|
||||
event.get("event"),
|
||||
event.get("data"),
|
||||
])
|
||||
@@ -1,121 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import datetime
|
||||
import hashlib
|
||||
import re
|
||||
|
||||
|
||||
def convert_mactime_to_unix(timestamp, from_2001=True):
|
||||
"""Converts Mac Standard Time to a Unix timestamp.
|
||||
|
||||
:param timestamp: MacTime timestamp (either int or float).
|
||||
:type timestamp: int
|
||||
:param from_2001: Whether the timestamp is counted from 2001-01-01 (Default value = True)
:type from_2001: bool
|
||||
:returns: Unix epoch timestamp.
|
||||
|
||||
"""
|
||||
if not timestamp:
|
||||
return None
|
||||
|
||||
# This is to fix formats in case of, for example, SMS messages database
|
||||
# timestamp format.
|
||||
if type(timestamp) == int and len(str(timestamp)) == 18:
|
||||
timestamp = int(str(timestamp)[:9])
|
||||
|
||||
# MacTime counts from 2001-01-01.
|
||||
if from_2001:
|
||||
timestamp = timestamp + 978307200
|
||||
|
||||
# TODO: This is rather ugly. Happens sometimes with invalid timestamps.
|
||||
try:
|
||||
return datetime.datetime.utcfromtimestamp(timestamp)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def convert_chrometime_to_unix(timestamp):
|
||||
"""Converts Chrome timestamp to a Unix timestamp.
|
||||
|
||||
:param timestamp: Chrome timestamp as int.
|
||||
:type timestamp: int
|
||||
:returns: Unix epoch timestamp.
|
||||
|
||||
"""
|
||||
epoch_start = datetime.datetime(1601, 1, 1)
|
||||
delta = datetime.timedelta(microseconds=timestamp)
|
||||
return epoch_start + delta
|
||||
|
||||
|
||||
def convert_timestamp_to_iso(timestamp):
|
||||
"""Converts Unix timestamp to ISO string.
|
||||
|
||||
:param timestamp: Unix timestamp.
|
||||
:type timestamp: int
|
||||
:returns: ISO timestamp string in YYYY-mm-dd HH:MM:SS.ms format.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
try:
|
||||
return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
|
||||
def check_for_links(text):
|
||||
"""Checks if a given text contains HTTP links.
|
||||
|
||||
:param text: Any provided text.
|
||||
:type text: str
|
||||
:returns: Search results.
|
||||
|
||||
"""
|
||||
return re.findall(r"(?P<url>https?://[^\s]+)", text, re.IGNORECASE)
|
||||
|
||||
|
||||
def get_sha256_from_file_path(file_path):
|
||||
"""Calculate the SHA256 hash of a file from a file path.
|
||||
|
||||
:param file_path: Path to the file to hash
|
||||
:returns: The SHA256 hash string
|
||||
|
||||
"""
|
||||
sha256_hash = hashlib.sha256()
|
||||
with open(file_path, "rb") as handle:
|
||||
for byte_block in iter(lambda: handle.read(4096), b""):
|
||||
sha256_hash.update(byte_block)
|
||||
|
||||
return sha256_hash.hexdigest()
|
||||
|
||||
|
||||
# Note: taken from here:
|
||||
# https://stackoverflow.com/questions/57014259/json-dumps-on-dictionary-with-bytes-for-keys
|
||||
def keys_bytes_to_string(obj):
|
||||
"""Convert object keys from bytes to string.
|
||||
|
||||
:param obj: Object to convert from bytes to string.
|
||||
:returns: Object converted to string.
|
||||
:rtype: str
|
||||
|
||||
"""
|
||||
new_obj = {}
|
||||
if not isinstance(obj, dict):
|
||||
if isinstance(obj, (tuple, list, set)):
|
||||
value = [keys_bytes_to_string(x) for x in obj]
|
||||
return value
|
||||
else:
|
||||
return obj
|
||||
|
||||
for key, value in obj.items():
|
||||
if isinstance(key, bytes):
|
||||
key = key.decode()
|
||||
if isinstance(value, dict):
|
||||
value = keys_bytes_to_string(value)
|
||||
elif isinstance(value, (tuple, list, set)):
|
||||
value = [keys_bytes_to_string(x) for x in value]
|
||||
new_obj[key] = value
|
||||
|
||||
return new_obj
|
||||
@@ -1,20 +0,0 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021-2022 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

import requests
from packaging import version

MVT_VERSION = "1.5.3"


def check_for_updates():
    res = requests.get("https://pypi.org/pypi/mvt/json")
    data = res.json()
    latest_version = data.get("info", {}).get("version", "")

    if version.parse(latest_version) > version.parse(MVT_VERSION):
        return latest_version

    return None
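A brief usage sketch, following only the behaviour defined above: check_for_updates() returns the newer version string when the PyPI release is ahead of MVT_VERSION, and None otherwise, which is how logo() decides whether to print the upgrade banner.

latest = check_for_updates()
if latest:
    print(f"Version {latest} is available! Upgrade mvt!")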
317
mvt/ios/cli.py
@@ -1,317 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
import click
|
||||
from rich.logging import RichHandler
|
||||
from rich.prompt import Prompt
|
||||
|
||||
from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_IOC,
|
||||
HELP_MSG_LIST_MODULES, HELP_MSG_MODULE,
|
||||
HELP_MSG_OUTPUT)
|
||||
from mvt.common.indicators import Indicators, download_indicators_files
|
||||
from mvt.common.logo import logo
|
||||
from mvt.common.module import run_module, save_timeline
|
||||
from mvt.common.options import MutuallyExclusiveOption
|
||||
|
||||
from .decrypt import DecryptBackup
|
||||
from .modules.backup import BACKUP_MODULES
|
||||
from .modules.fs import FS_MODULES
|
||||
from .modules.mixed import MIXED_MODULES
|
||||
|
||||
# Setup logging using Rich.
|
||||
LOG_FORMAT = "[%(name)s] %(message)s"
|
||||
logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
|
||||
RichHandler(show_path=False, log_time_format="%X")])
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Set this environment variable to a password if needed.
|
||||
PASSWD_ENV = "MVT_IOS_BACKUP_PASSWORD"
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Main
|
||||
#==============================================================================
|
||||
@click.group(invoke_without_command=False)
|
||||
def cli():
|
||||
logo()
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: version
|
||||
#==============================================================================
|
||||
@cli.command("version", help="Show the currently installed version of MVT")
|
||||
def version():
|
||||
return
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: decrypt-backup
|
||||
#==============================================================================
|
||||
@cli.command("decrypt-backup", help="Decrypt an encrypted iTunes backup")
|
||||
@click.option("--destination", "-d", required=True,
|
||||
help="Path to the folder where to store the decrypted backup")
|
||||
@click.option("--password", "-p", cls=MutuallyExclusiveOption,
|
||||
help=f"Password to use to decrypt the backup (or, set {PASSWD_ENV} environment variable)",
|
||||
mutually_exclusive=["key_file"])
|
||||
@click.option("--key-file", "-k", cls=MutuallyExclusiveOption,
|
||||
type=click.Path(exists=True),
|
||||
help="File containing raw encryption key to use to decrypt the backup",
|
||||
mutually_exclusive=["password"])
|
||||
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
|
||||
@click.pass_context
|
||||
def decrypt_backup(ctx, destination, password, key_file, backup_path):
|
||||
backup = DecryptBackup(backup_path, destination)
|
||||
|
||||
if key_file:
|
||||
if PASSWD_ENV in os.environ:
|
||||
log.info("Ignoring %s environment variable, using --key-file '%s' instead",
         PASSWD_ENV, key_file)
|
||||
|
||||
backup.decrypt_with_key_file(key_file)
|
||||
elif password:
|
||||
log.info("Your password may be visible in the process table because it was supplied on the command line!")
|
||||
|
||||
if PASSWD_ENV in os.environ:
|
||||
log.info("Ignoring %s environment variable, using --password argument instead",
|
||||
PASSWD_ENV)
|
||||
|
||||
backup.decrypt_with_password(password)
|
||||
elif PASSWD_ENV in os.environ:
|
||||
log.info("Using password from %s environment variable", PASSWD_ENV)
|
||||
backup.decrypt_with_password(os.environ[PASSWD_ENV])
|
||||
else:
|
||||
sekrit = Prompt.ask("Enter backup password", password=True)
|
||||
backup.decrypt_with_password(sekrit)
|
||||
|
||||
if not backup.can_process():
|
||||
ctx.exit(1)
|
||||
|
||||
backup.process_backup()
|
||||
|
||||
|
||||
#==============================================================================
|
||||
# Command: extract-key
|
||||
#==============================================================================
|
||||
@cli.command("extract-key", help="Extract decryption key from an iTunes backup")
|
||||
@click.option("--password", "-p",
|
||||
help=f"Password to use to decrypt the backup (or, set {PASSWD_ENV} environment variable)")
|
||||
@click.option("--key-file", "-k",
|
||||
help="Key file to be written (if unset, will print to STDOUT)",
|
||||
required=False,
|
||||
type=click.Path(exists=False, file_okay=True, dir_okay=False, writable=True))
|
||||
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
|
||||
def extract_key(password, backup_path, key_file):
|
||||
backup = DecryptBackup(backup_path)
|
||||
|
||||
if password:
|
||||
log.info("Your password may be visible in the process table because it was supplied on the command line!")
|
||||
|
||||
if PASSWD_ENV in os.environ:
|
||||
log.info("Ignoring %s environment variable, using --password argument instead",
|
||||
PASSWD_ENV)
|
||||
elif PASSWD_ENV in os.environ:
|
||||
log.info("Using password from %s environment variable", PASSWD_ENV)
|
||||
password = os.environ[PASSWD_ENV]
|
||||
else:
|
||||
password = Prompt.ask("Enter backup password", password=True)
|
||||
|
||||
backup.decrypt_with_password(password)
|
||||
backup.get_key()
|
||||
|
||||
if key_file:
|
||||
backup.write_key(key_file)
|
||||
|
||||
|
||||
#==============================================================================
# Command: check-backup
#==============================================================================
@cli.command("check-backup", help="Extract artifacts from an iTunes backup")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
              default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
@click.pass_context
def check_backup(ctx, iocs, output, fast, backup_path, list_modules, module):
    if list_modules:
        log.info("Following is the list of available check-backup modules:")
        for backup_module in BACKUP_MODULES + MIXED_MODULES:
            log.info(" - %s", backup_module.__name__)

        return

    log.info("Checking iTunes backup located at: %s", backup_path)

    if output and not os.path.exists(output):
        try:
            os.makedirs(output)
        except Exception as e:
            log.critical("Unable to create output folder %s: %s", output, e)
            ctx.exit(1)

    indicators = Indicators(log=log)
    indicators.load_indicators_files(iocs)

    timeline = []
    timeline_detected = []
    for backup_module in BACKUP_MODULES + MIXED_MODULES:
        if module and backup_module.__name__ != module:
            continue

        m = backup_module(base_folder=backup_path, output_folder=output, fast_mode=fast,
                          log=logging.getLogger(backup_module.__module__))
        m.is_backup = True
        if indicators.total_ioc_count > 0:
            m.indicators = indicators
            m.indicators.log = m.log

        run_module(m)
        timeline.extend(m.timeline)
        timeline_detected.extend(m.timeline_detected)

    if output:
        if len(timeline) > 0:
            save_timeline(timeline, os.path.join(output, "timeline.csv"))
        if len(timeline_detected) > 0:
            save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))

    if len(timeline_detected) > 0:
        log.warning("The analysis of the backup produced %d detections!",
                    len(timeline_detected))

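For testing, check-backup can be exercised without a terminal through click's CliRunner. The import path of the cli group in this sketch is an assumption; adjust it to wherever the command group above actually lives.

# Hedged example of invoking check-backup in a test with click's CliRunner.
# The `cli` import path below is an assumption, not confirmed by this diff.
from click.testing import CliRunner

from mvt.ios.cli import cli  # assumed module path for the command group


def run_check_backup(backup_path, output_dir, ioc_file):
    runner = CliRunner()
    result = runner.invoke(cli, [
        "check-backup",
        "--output", output_dir,
        "--iocs", ioc_file,
        backup_path,
    ])
    # A non-zero exit code signals a failure such as an unreadable backup
    # or an output folder that could not be created.
    return result.exit_code, result.output
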
#==============================================================================
# Command: check-fs
#==============================================================================
@cli.command("check-fs", help="Extract artifacts from a full filesystem dump")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
              default=[], help=HELP_MSG_IOC)
@click.option("--output", "-o", type=click.Path(exists=False), help=HELP_MSG_OUTPUT)
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.argument("DUMP_PATH", type=click.Path(exists=True))
@click.pass_context
def check_fs(ctx, iocs, output, fast, dump_path, list_modules, module):
    if list_modules:
        log.info("Following is the list of available check-fs modules:")
        for fs_module in FS_MODULES + MIXED_MODULES:
            log.info(" - %s", fs_module.__name__)

        return

    log.info("Checking filesystem dump located at: %s", dump_path)

    if output and not os.path.exists(output):
        try:
            os.makedirs(output)
        except Exception as e:
            log.critical("Unable to create output folder %s: %s", output, e)
            ctx.exit(1)

    indicators = Indicators(log=log)
    indicators.load_indicators_files(iocs)

    timeline = []
    timeline_detected = []
    for fs_module in FS_MODULES + MIXED_MODULES:
        if module and fs_module.__name__ != module:
            continue

        m = fs_module(base_folder=dump_path, output_folder=output, fast_mode=fast,
                      log=logging.getLogger(fs_module.__module__))

        m.is_fs_dump = True
        if indicators.total_ioc_count > 0:
            m.indicators = indicators
            m.indicators.log = m.log

        run_module(m)
        timeline.extend(m.timeline)
        timeline_detected.extend(m.timeline_detected)

    if output:
        if len(timeline) > 0:
            save_timeline(timeline, os.path.join(output, "timeline.csv"))
        if len(timeline_detected) > 0:
            save_timeline(timeline_detected, os.path.join(output, "timeline_detected.csv"))

    if len(timeline_detected) > 0:
        log.warning("The analysis of the filesystem produced %d detections!",
                    len(timeline_detected))

#==============================================================================
# Command: check-iocs
#==============================================================================
@cli.command("check-iocs", help="Compare stored JSON results to provided indicators")
@click.option("--iocs", "-i", type=click.Path(exists=True), multiple=True,
              default=[], help=HELP_MSG_IOC)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.argument("FOLDER", type=click.Path(exists=True))
@click.pass_context
def check_iocs(ctx, iocs, list_modules, module, folder):
    all_modules = []
    for entry in BACKUP_MODULES + FS_MODULES + MIXED_MODULES:
        if entry not in all_modules:
            all_modules.append(entry)

    if list_modules:
        log.info("Following is the list of available check-iocs modules:")
        for iocs_module in all_modules:
            log.info(" - %s", iocs_module.__name__)

        return

    log.info("Checking stored results against provided indicators...")

    indicators = Indicators(log=log)
    indicators.load_indicators_files(iocs)

    total_detections = 0
    for file_name in os.listdir(folder):
        name_only, ext = os.path.splitext(file_name)
        file_path = os.path.join(folder, file_name)

        for iocs_module in all_modules:
            if module and iocs_module.__name__ != module:
                continue

            if iocs_module().get_slug() != name_only:
                continue

            log.info("Loading results from \"%s\" with module %s", file_name,
                     iocs_module.__name__)

            m = iocs_module.from_json(file_path,
                                      log=logging.getLogger(iocs_module.__module__))
            if indicators.total_ioc_count > 0:
                m.indicators = indicators
                m.indicators.log = m.log

            try:
                m.check_indicators()
            except NotImplementedError:
                continue
            else:
                total_detections += len(m.detected)

    if total_detections > 0:
        log.warning("The check of the results produced %d detections!",
                    total_detections)

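check-iocs only loads a stored JSON file when its base name matches the slug returned by a module's get_slug(). The snake_case derivation below is an illustrative assumption of how such a slug can be computed from a class name, not necessarily the project's exact implementation.

# Illustration of slug matching: "chrome_history.json" would be paired with a
# module named ChromeHistory. The regex-based derivation is an assumption.
import re


def module_slug(module_name: str) -> str:
    # e.g. "ChromeHistory" -> "chrome_history"
    return re.sub(r"(?<!^)(?=[A-Z])", "_", module_name).lower()


print(module_slug("ChromeHistory"))  # chrome_history
print(module_slug("Manifest"))       # manifest
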
#==============================================================================
# Command: download-iocs
#==============================================================================
@cli.command("download-iocs", help="Download public STIX2 indicators")
def download_iocs():
    download_indicators_files(log)

@@ -1,4 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
@@ -1,50 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import os
|
||||
import plistlib
|
||||
|
||||
from mvt.common.module import DatabaseNotFoundError
|
||||
from mvt.ios.versions import latest_ios_version
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
|
||||
class BackupInfo(IOSExtraction):
|
||||
"""This module extracts information about the device and the backup."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.results = {}
|
||||
|
||||
def run(self):
|
||||
info_path = os.path.join(self.base_folder, "Info.plist")
|
||||
if not os.path.exists(info_path):
|
||||
raise DatabaseNotFoundError("No Info.plist at backup path, unable to extract device information")
|
||||
|
||||
with open(info_path, "rb") as handle:
|
||||
info = plistlib.load(handle)
|
||||
|
||||
fields = ["Build Version", "Device Name", "Display Name",
|
||||
"GUID", "ICCID", "IMEI", "MEID", "Installed Applications",
|
||||
"Last Backup Date", "Phone Number", "Product Name",
|
||||
"Product Type", "Product Version", "Serial Number",
|
||||
"Target Identifier", "Target Type", "Unique Identifier",
|
||||
"iTunes Version"]
|
||||
|
||||
for field in fields:
|
||||
value = info.get(field, None)
|
||||
self.log.info("%s: %s", field, value)
|
||||
self.results[field] = value
|
||||
|
||||
if "Product Version" in info:
|
||||
latest = latest_ios_version()
|
||||
if info["Product Version"] != latest["version"]:
|
||||
self.log.warning("This phone is running an outdated iOS version: %s (latest is %s)",
|
||||
info["Product Version"], latest['version'])
|
||||
@@ -1,100 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import os
|
||||
import plistlib
|
||||
from base64 import b64encode
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CONF_PROFILES_DOMAIN = "SysSharedContainerDomain-systemgroup.com.apple.configurationprofiles"
|
||||
|
||||
|
||||
class ConfigurationProfiles(IOSExtraction):
|
||||
"""This module extracts the full plist data from configuration profiles."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
if not record["install_date"]:
|
||||
return
|
||||
|
||||
payload_name = record['plist'].get('PayloadDisplayName')
|
||||
payload_description = record['plist'].get('PayloadDescription')
|
||||
return {
|
||||
"timestamp": record["install_date"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "configuration_profile_install",
|
||||
"data": f"{record['plist']['PayloadType']} installed: {record['plist']['PayloadUUID']} - {payload_name}: {payload_description}"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if result["plist"].get("PayloadUUID"):
|
||||
payload_content = result["plist"]["PayloadContent"][0]
|
||||
|
||||
# Alert on any known malicious configuration profiles in the indicator list.
|
||||
ioc = self.indicators.check_profile(result["plist"]["PayloadUUID"])
|
||||
if ioc:
|
||||
self.log.warning(f"Found a known malicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with UUID '{result['plist']['PayloadUUID']}'.")
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
# Highlight suspicious configuration profiles which may be used to hide notifications.
|
||||
if payload_content["PayloadType"] in ["com.apple.notificationsettings"]:
|
||||
self.log.warning(f"Found a potentially suspicious configuration profile \"{result['plist']['PayloadDisplayName']}\" with payload type '{payload_content['PayloadType']}'.")
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def run(self):
|
||||
for conf_file in self._get_backup_files_from_manifest(domain=CONF_PROFILES_DOMAIN):
|
||||
conf_rel_path = conf_file["relative_path"]
|
||||
# Filter out all configuration files that are not configuration profiles.
|
||||
if not conf_rel_path or not os.path.basename(conf_rel_path).startswith("profile-"):
|
||||
continue
|
||||
|
||||
conf_file_path = self._get_backup_file_from_id(conf_file["file_id"])
|
||||
if not conf_file_path:
|
||||
continue
|
||||
|
||||
with open(conf_file_path, "rb") as handle:
|
||||
try:
|
||||
conf_plist = plistlib.load(handle)
|
||||
except Exception:
|
||||
conf_plist = {}
|
||||
|
||||
if "SignerCerts" in conf_plist:
|
||||
conf_plist["SignerCerts"] = [b64encode(x) for x in conf_plist["SignerCerts"]]
|
||||
if "OTAProfileStub" in conf_plist:
|
||||
if "SignerCerts" in conf_plist["OTAProfileStub"]:
|
||||
conf_plist["OTAProfileStub"]["SignerCerts"] = [b64encode(x) for x in conf_plist["OTAProfileStub"]["SignerCerts"]]
|
||||
if "PushTokenDataSentToServerKey" in conf_plist:
|
||||
conf_plist["PushTokenDataSentToServerKey"] = b64encode(conf_plist["PushTokenDataSentToServerKey"])
|
||||
if "LastPushTokenHash" in conf_plist:
|
||||
conf_plist["LastPushTokenHash"] = b64encode(conf_plist["LastPushTokenHash"])
|
||||
if "PayloadContent" in conf_plist:
|
||||
for x in range(len(conf_plist["PayloadContent"])):
|
||||
if "PERSISTENT_REF" in conf_plist["PayloadContent"][x]:
|
||||
conf_plist["PayloadContent"][x]["PERSISTENT_REF"] = b64encode(conf_plist["PayloadContent"][x]["PERSISTENT_REF"])
|
||||
|
||||
self.results.append({
|
||||
"file_id": conf_file["file_id"],
|
||||
"relative_path": conf_file["relative_path"],
|
||||
"domain": conf_file["domain"],
|
||||
"plist": conf_plist,
|
||||
"install_date": convert_timestamp_to_iso(conf_plist.get("InstallDate")),
|
||||
})
|
||||
|
||||
self.log.info("Extracted details about %d configuration profiles", len(self.results))
|
||||
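ConfigurationProfiles base64-encodes the binary plist fields (SignerCerts, push token data, PERSISTENT_REF entries) because raw bytes cannot be written to the JSON results files. A minimal, self-contained illustration of that constraint, using synthetic certificate bytes:

# Why binary plist values are base64-encoded before being stored in results:
# bytes are not JSON-serializable, base64 strings are. The value is synthetic.
import json
from base64 import b64encode

raw = {"SignerCerts": [b"\x30\x82\x01\x0a"]}  # synthetic DER-like bytes

try:
    json.dumps(raw)
except TypeError as exc:
    print("cannot serialize raw bytes:", exc)

encoded = {"SignerCerts": [b64encode(x).decode("ascii") for x in raw["SignerCerts"]]}
print(json.dumps(encoded))
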
@@ -1,143 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import datetime
|
||||
import io
|
||||
import os
|
||||
import plistlib
|
||||
import sqlite3
|
||||
|
||||
from mvt.common.module import DatabaseNotFoundError
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
|
||||
class Manifest(IOSExtraction):
|
||||
"""This module extracts information from a backup Manifest.db file."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def _get_key(self, dictionary, key):
|
||||
"""Unserialized plist objects can have keys which are str or byte types
|
||||
This is a helper to try fetch a key as both a byte or string type.
|
||||
|
||||
:param dictionary:
|
||||
:param key:
|
||||
|
||||
"""
|
||||
return dictionary.get(key.encode("utf-8"), None) or dictionary.get(key, None)
|
||||
|
||||
@staticmethod
|
||||
def _convert_timestamp(timestamp_or_unix_time_int):
|
||||
"""Older iOS versions stored the manifest times as unix timestamps.
|
||||
|
||||
:param timestamp_or_unix_time_int:
|
||||
|
||||
"""
|
||||
if isinstance(timestamp_or_unix_time_int, datetime.datetime):
|
||||
return convert_timestamp_to_iso(timestamp_or_unix_time_int)
|
||||
else:
|
||||
timestamp = datetime.datetime.utcfromtimestamp(timestamp_or_unix_time_int)
|
||||
return convert_timestamp_to_iso(timestamp)
|
||||
|
||||
def serialize(self, record):
|
||||
records = []
|
||||
if "modified" not in record or "status_changed" not in record:
|
||||
return
|
||||
for ts in set([record["created"], record["modified"], record["status_changed"]]):
|
||||
macb = ""
|
||||
macb += "M" if ts == record["modified"] else "-"
|
||||
macb += "-"
|
||||
macb += "C" if ts == record["status_changed"] else "-"
|
||||
macb += "B" if ts == record["created"] else "-"
|
||||
|
||||
records.append({
|
||||
"timestamp": ts,
|
||||
"module": self.__class__.__name__,
|
||||
"event": macb,
|
||||
"data": f"{record['relative_path']} - {record['domain']}"
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
if not result.get("relative_path"):
|
||||
continue
|
||||
|
||||
if result["domain"]:
|
||||
if os.path.basename(result["relative_path"]) == "com.apple.CrashReporter.plist" and result["domain"] == "RootDomain":
|
||||
self.log.warning("Found a potentially suspicious \"com.apple.CrashReporter.plist\" file created in RootDomain")
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if self.indicators.check_file_path("/" + result["relative_path"]):
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
rel_path = result["relative_path"].lower()
|
||||
for ioc in self.indicators.get_iocs("domains"):
|
||||
if ioc["value"].lower() in rel_path:
|
||||
self.log.warning("Found mention of domain \"%s\" in a backup file with path: %s",
|
||||
ioc["value"], rel_path)
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
|
||||
if not os.path.isfile(manifest_db_path):
|
||||
raise DatabaseNotFoundError("unable to find backup's Manifest.db")
|
||||
|
||||
self.log.info("Found Manifest.db database at path: %s", manifest_db_path)
|
||||
|
||||
conn = sqlite3.connect(manifest_db_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
cur.execute("SELECT * FROM Files;")
|
||||
names = [description[0] for description in cur.description]
|
||||
|
||||
for file_entry in cur:
|
||||
file_data = {}
|
||||
for index, value in enumerate(file_entry):
|
||||
file_data[names[index]] = value
|
||||
|
||||
cleaned_metadata = {
|
||||
"file_id": file_data["fileID"],
|
||||
"domain": file_data["domain"],
|
||||
"relative_path": file_data["relativePath"],
|
||||
"flags": file_data["flags"],
|
||||
"created": "",
|
||||
}
|
||||
|
||||
if file_data["file"]:
|
||||
try:
|
||||
file_plist = plistlib.load(io.BytesIO(file_data["file"]))
|
||||
file_metadata = self._get_key(file_plist, "$objects")[1]
|
||||
cleaned_metadata.update({
|
||||
"created": self._convert_timestamp(self._get_key(file_metadata, "Birth")),
|
||||
"modified": self._convert_timestamp(self._get_key(file_metadata, "LastModified")),
|
||||
"status_changed": self._convert_timestamp(self._get_key(file_metadata, "LastStatusChange")),
|
||||
"mode": oct(self._get_key(file_metadata, "Mode")),
|
||||
"owner": self._get_key(file_metadata, "UserID"),
|
||||
"size": self._get_key(file_metadata, "Size"),
|
||||
})
|
||||
except Exception:
|
||||
self.log.exception("Error reading manifest file metadata for file with ID %s and relative path %s",
|
||||
file_data["fileID"], file_data["relativePath"])
|
||||
pass
|
||||
|
||||
self.results.append(cleaned_metadata)
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d file metadata items", len(self.results))
|
||||
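The Manifest module's serialize() emits one timeline row per distinct timestamp of a file record, flagged with an MACB-style string (Modified, then an access slot that is always "-", Changed, Birth). A worked example with a synthetic record, reusing the same flag construction:

# Worked example of the MACB flag logic in Manifest.serialize(), applied to a
# synthetic record; the timestamps are illustrative ISO strings.
record = {
    "relative_path": "Library/Preferences/com.apple.example.plist",
    "domain": "HomeDomain",
    "created": "2022-01-01 10:00:00.000000",
    "modified": "2022-01-02 12:00:00.000000",
    "status_changed": "2022-01-02 12:00:00.000000",
}

for ts in set([record["created"], record["modified"], record["status_changed"]]):
    macb = ""
    macb += "M" if ts == record["modified"] else "-"
    macb += "-"  # the access slot is always "-" in this module
    macb += "C" if ts == record["status_changed"] else "-"
    macb += "B" if ts == record["created"] else "-"
    print(ts, macb, f"{record['relative_path']} - {record['domain']}")
# Expected output (order may vary):
#   2022-01-02 12:00:00.000000 M-C- Library/Preferences/... - HomeDomain
#   2022-01-01 10:00:00.000000 ---B Library/Preferences/... - HomeDomain
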
@@ -1,61 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import plistlib
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CONF_PROFILES_EVENTS_RELPATH = "Library/ConfigurationProfiles/MCProfileEvents.plist"
|
||||
|
||||
|
||||
class ProfileEvents(IOSExtraction):
|
||||
"""This module extracts events related to the installation of configuration
|
||||
profiles.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record.get("timestamp"),
|
||||
"module": self.__class__.__name__,
|
||||
"event": "profile_operation",
|
||||
"data": f"Process {record.get('process')} started operation {record.get('operation')} of profile {record.get('profile_id')}"
|
||||
}
|
||||
|
||||
def run(self):
|
||||
for events_file in self._get_backup_files_from_manifest(relative_path=CONF_PROFILES_EVENTS_RELPATH):
|
||||
events_file_path = self._get_backup_file_from_id(events_file["file_id"])
|
||||
if not events_file_path:
|
||||
continue
|
||||
|
||||
with open(events_file_path, "rb") as handle:
|
||||
events_plist = plistlib.load(handle)
|
||||
|
||||
if "ProfileEvents" not in events_plist:
|
||||
continue
|
||||
|
||||
for event in events_plist["ProfileEvents"]:
|
||||
key = list(event.keys())[0]
|
||||
self.log.info("On %s process \"%s\" started operation \"%s\" of profile \"%s\"",
|
||||
event[key].get("timestamp"), event[key].get("process"),
|
||||
event[key].get("operation"), key)
|
||||
|
||||
self.results.append({
|
||||
"profile_id": key,
|
||||
"timestamp": convert_timestamp_to_iso(event[key].get("timestamp")),
|
||||
"operation": event[key].get("operation"),
|
||||
"process": event[key].get("process"),
|
||||
})
|
||||
|
||||
self.log.info("Extracted %d profile events", len(self.results))
|
||||
@@ -1,157 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import sqlite3
|
||||
import subprocess
|
||||
|
||||
from mvt.common.module import (DatabaseCorruptedError, DatabaseNotFoundError,
|
||||
MVTModule)
|
||||
|
||||
|
||||
class IOSExtraction(MVTModule):
|
||||
"""This class provides a base for all iOS filesystem/backup extraction modules."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.is_backup = False
|
||||
self.is_fs_dump = False
|
||||
self.is_sysdiagnose = False
|
||||
|
||||
def _recover_sqlite_db_if_needed(self, file_path, forced=False):
|
||||
"""Tries to recover a malformed database by running a .clone command.
|
||||
|
||||
:param file_path: Path to the malformed database file.
|
||||
|
||||
"""
|
||||
# TODO: Find a better solution.
|
||||
if not forced:
|
||||
conn = sqlite3.connect(file_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
try:
|
||||
recover = False
|
||||
cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
|
||||
except sqlite3.DatabaseError as e:
|
||||
if "database disk image is malformed" in str(e):
|
||||
recover = True
|
||||
finally:
|
||||
conn.close()
|
||||
|
||||
if not recover:
|
||||
return
|
||||
|
||||
self.log.info("Database at path %s is malformed. Trying to recover...", file_path)
|
||||
|
||||
if not shutil.which("sqlite3"):
|
||||
raise DatabaseCorruptedError("failed to recover without sqlite3 binary: please install sqlite3!")
|
||||
if '"' in file_path:
|
||||
raise DatabaseCorruptedError(f"database at path '{file_path}' is corrupted. unable to recover because it has a quotation mark (\") in its name")
|
||||
|
||||
bak_path = f"{file_path}.bak"
|
||||
shutil.move(file_path, bak_path)
|
||||
|
||||
ret = subprocess.call(["sqlite3", bak_path, f".clone \"{file_path}\""],
|
||||
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
if ret != 0:
|
||||
raise DatabaseCorruptedError("failed to recover database")
|
||||
|
||||
self.log.info("Database at path %s recovered successfully!", file_path)
|
||||
|
||||
def _get_backup_files_from_manifest(self, relative_path=None, domain=None):
|
||||
"""Locate files from Manifest.db.
|
||||
|
||||
:param relative_path: Relative path to use as filter from Manifest.db. (Default value = None)
|
||||
:param domain: Domain to use as filter from Manifest.db. (Default value = None)
|
||||
|
||||
"""
|
||||
manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
|
||||
if not os.path.exists(manifest_db_path):
|
||||
raise DatabaseNotFoundError("unable to find backup's Manifest.db")
|
||||
|
||||
base_sql = "SELECT fileID, domain, relativePath FROM Files WHERE "
|
||||
|
||||
try:
|
||||
conn = sqlite3.connect(manifest_db_path)
|
||||
cur = conn.cursor()
|
||||
if relative_path and domain:
|
||||
cur.execute(f"{base_sql} relativePath = ? AND domain = ?;",
|
||||
(relative_path, domain))
|
||||
else:
|
||||
if relative_path:
|
||||
cur.execute(f"{base_sql} relativePath = ?;", (relative_path,))
|
||||
elif domain:
|
||||
cur.execute(f"{base_sql} domain = ?;", (domain,))
|
||||
        except Exception as e:
            raise DatabaseCorruptedError(f"failed to query Manifest.db: {e}")
|
||||
|
||||
for row in cur:
|
||||
yield {
|
||||
"file_id": row[0],
|
||||
"domain": row[1],
|
||||
"relative_path": row[2],
|
||||
}
|
||||
|
||||
def _get_backup_file_from_id(self, file_id):
|
||||
file_path = os.path.join(self.base_folder, file_id[0:2], file_id)
|
||||
if os.path.exists(file_path):
|
||||
return file_path
|
||||
|
||||
return None
|
||||
|
||||
def _get_fs_files_from_patterns(self, root_paths):
|
||||
for root_path in root_paths:
|
||||
for found_path in glob.glob(os.path.join(self.base_folder, root_path)):
|
||||
if not os.path.exists(found_path):
|
||||
continue
|
||||
|
||||
yield found_path
|
||||
|
||||
    def _find_ios_database(self, backup_ids=None, root_paths=[]):
        """Try to locate a module's database file from either an iTunes
        backup or a full filesystem dump. This is intended only for
        modules that expect to work with a single SQLite database.
        If a module needs to process multiple databases or files,
        you should use the helper functions above.

        :param backup_ids: List of iTunes backup file IDs (hashes) to try. (Default value = None)
        :param root_paths: Glob patterns for files to seek in filesystem dump. (Default value = [])

        """
        file_path = None
        # First we check if there was an explicit file path specified.
        if not self.file_path:
|
||||
# If not, we first try with backups.
|
||||
# We construct the path to the file according to the iTunes backup
|
||||
# folder structure, if we have a valid ID.
|
||||
if backup_ids:
|
||||
for backup_id in backup_ids:
|
||||
file_path = self._get_backup_file_from_id(backup_id)
|
||||
if file_path:
|
||||
break
|
||||
|
||||
# If this file does not exist we might be processing a full
|
||||
# filesystem dump (checkra1n all the things!).
|
||||
if not file_path or not os.path.exists(file_path):
|
||||
# We reset the file_path.
|
||||
file_path = None
|
||||
for found_path in self._get_fs_files_from_patterns(root_paths):
|
||||
file_path = found_path
|
||||
break
|
||||
|
||||
# If we do not find any, we fail.
|
||||
if file_path:
|
||||
self.file_path = file_path
|
||||
else:
|
||||
raise DatabaseNotFoundError("unable to find the module's database file")
|
||||
|
||||
self._recover_sqlite_db_if_needed(self.file_path)
|
||||
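A minimal sketch of how a concrete module uses this base class: declare candidate backup file IDs and filesystem glob patterns, let _find_ios_database() resolve self.file_path, then query the database. The IDs, paths, and import path below are placeholders, not real artifact locations, and the module itself is not part of MVT.

# Not a real MVT module: a sketch of the IOSExtraction contract only.
import sqlite3

from mvt.ios.modules.base import IOSExtraction  # assumed import path

EXAMPLE_BACKUP_IDS = ["0000000000000000000000000000000000000000"]  # placeholder
EXAMPLE_ROOT_PATHS = ["private/var/mobile/Library/Example/example.sqlite"]  # placeholder


class ExampleModule(IOSExtraction):
    def run(self):
        # Resolves self.file_path from the backup ID or the glob patterns,
        # and attempts SQLite recovery if the database is malformed.
        self._find_ios_database(backup_ids=EXAMPLE_BACKUP_IDS,
                                root_paths=EXAMPLE_ROOT_PATHS)
        conn = sqlite3.connect(self.file_path)
        cur = conn.cursor()
        cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
        self.results = [{"table": row[0]} for row in cur]
        conn.close()

# Usage sketch:
#   module = ExampleModule(base_folder="/path/to/backup_or_dump")
#   module.run()
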
@@ -1,80 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
|
||||
class CacheFiles(IOSExtraction):
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
records = []
|
||||
for item in self.results[record]:
|
||||
records.append({
|
||||
"timestamp": item["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "cache_response",
|
||||
"data": f"{record} recorded visit to URL {item['url']}"
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
self.detected = {}
|
||||
for key, values in self.results.items():
|
||||
for value in values:
|
||||
ioc = self.indicators.check_domain(value["url"])
|
||||
if ioc:
|
||||
value["matched_indicator"] = ioc
|
||||
if key not in self.detected:
|
||||
self.detected[key] = [value, ]
|
||||
else:
|
||||
self.detected[key].append(value)
|
||||
|
||||
def _process_cache_file(self, file_path):
|
||||
self.log.info("Processing cache file at path: %s", file_path)
|
||||
|
||||
conn = sqlite3.connect(file_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
try:
|
||||
cur.execute("SELECT * FROM cfurl_cache_response;")
|
||||
except sqlite3.OperationalError:
|
||||
return
|
||||
|
||||
key_name = os.path.relpath(file_path, self.base_folder)
|
||||
if key_name not in self.results:
|
||||
self.results[key_name] = []
|
||||
|
||||
for row in cur:
|
||||
self.results[key_name].append({
|
||||
"entry_id": row[0],
|
||||
"version": row[1],
|
||||
"hash_value": row[2],
|
||||
"storage_policy": row[3],
|
||||
"url": row[4],
|
||||
"isodate": row[5],
|
||||
})
|
||||
|
||||
def run(self):
|
||||
self.results = {}
|
||||
for root, dirs, files in os.walk(self.base_folder):
|
||||
for file_name in files:
|
||||
if file_name != "Cache.db":
|
||||
continue
|
||||
|
||||
file_path = os.path.join(root, file_name)
|
||||
self._process_cache_file(file_path)
|
||||
@@ -1,39 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
|
||||
from ..net_base import NetBase
|
||||
|
||||
NETUSAGE_ROOT_PATHS = [
|
||||
"private/var/networkd/netusage.sqlite",
|
||||
"private/var/networkd/db/netusage.sqlite"
|
||||
]
|
||||
|
||||
|
||||
class Netusage(NetBase):
|
||||
"""This class extracts data from netusage.sqlite and attempts to identify
|
||||
any suspicious processes if running on a full filesystem dump.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
for netusage_path in self._get_fs_files_from_patterns(NETUSAGE_ROOT_PATHS):
|
||||
self.file_path = netusage_path
|
||||
self.log.info("Found NetUsage database at path: %s", self.file_path)
|
||||
try:
|
||||
self._extract_net_data()
|
||||
except sqlite3.OperationalError as e:
|
||||
self.log.info("Skipping this NetUsage database because it seems empty or malformed: %s", e)
|
||||
continue
|
||||
|
||||
self._find_suspicious_processes()
|
||||
@@ -1,90 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
SHUTDOWN_LOG_PATH = [
|
||||
"private/var/db/diagnostics/shutdown.log",
|
||||
]
|
||||
|
||||
|
||||
class ShutdownLog(IOSExtraction):
|
||||
"""This module extracts processes information from the shutdown log file."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "shutdown",
|
||||
"data": f"Client {record['client']} with PID {record['pid']} was running when the device was shut down",
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_file_path(result["client"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
for ioc in self.indicators.get_iocs("processes"):
|
||||
parts = result["client"].split("/")
|
||||
if ioc in parts:
|
||||
self.log.warning("Found mention of a known malicious process \"%s\" in shutdown.log",
|
||||
ioc)
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def process_shutdownlog(self, content):
|
||||
current_processes = []
|
||||
for line in content.split("\n"):
|
||||
line = line.strip()
|
||||
|
||||
if line.startswith("remaining client pid:"):
|
||||
current_processes.append({
|
||||
"pid": line[line.find("pid: ")+5:line.find(" (")],
|
||||
"client": line[line.find("(")+1:line.find(")")],
|
||||
})
|
||||
elif line.startswith("SIGTERM: "):
|
||||
try:
|
||||
mac_timestamp = int(line[line.find("[")+1:line.find("]")])
|
||||
except ValueError:
|
||||
try:
|
||||
start = line.find(" @")+2
|
||||
mac_timestamp = int(line[start:start+10])
|
||||
except Exception:
|
||||
mac_timestamp = 0
|
||||
|
||||
timestamp = convert_mactime_to_unix(mac_timestamp, from_2001=False)
|
||||
isodate = convert_timestamp_to_iso(timestamp)
|
||||
|
||||
for current_process in current_processes:
|
||||
self.results.append({
|
||||
"isodate": isodate,
|
||||
"pid": current_process["pid"],
|
||||
"client": current_process["client"],
|
||||
})
|
||||
|
||||
current_processes = []
|
||||
|
||||
self.results = sorted(self.results, key=lambda entry: entry["isodate"])
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(root_paths=SHUTDOWN_LOG_PATH)
|
||||
self.log.info("Found shutdown log at path: %s", self.file_path)
|
||||
with open(self.file_path, "r", encoding="utf-8") as handle:
|
||||
self.process_shutdownlog(handle.read())
|
||||
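The format handled by process_shutdownlog() can be read off the slicing logic: "remaining client pid:" lines accumulate processes, and the following "SIGTERM:" line carries the timestamp applied to all of them. A short synthetic example; the log lines are illustrative, inferred from the parser rather than copied from a real device:

# Synthetic shutdown.log excerpt shaped to match what process_shutdownlog()
# expects; the paths and PIDs are invented for the example.
sample = """remaining client pid: 123 (/usr/libexec/exampled)
remaining client pid: 456 (/private/var/example/agent)
SIGTERM: [1650000000] shutdown initiated"""

current = []
for line in sample.split("\n"):
    line = line.strip()
    if line.startswith("remaining client pid:"):
        current.append({
            "pid": line[line.find("pid: ") + 5:line.find(" (")],
            "client": line[line.find("(") + 1:line.find(")")],
        })
    elif line.startswith("SIGTERM: "):
        raw_timestamp = int(line[line.find("[") + 1:line.find("]")])
        for proc in current:
            print(raw_timestamp, proc["pid"], proc["client"])
        current = []
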
@@ -1,29 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
from .webkit_base import WebkitBase
|
||||
|
||||
WEBKIT_SAFARIVIEWSERVICE_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/SystemData/com.apple.SafariViewService/Library/WebKit/WebsiteData/",
|
||||
]
|
||||
|
||||
|
||||
class WebkitSafariViewService(WebkitBase):
|
||||
"""This module looks extracts records from WebKit LocalStorage folders,
|
||||
and checks them against any provided list of suspicious domains.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._process_webkit_folder(WEBKIT_SAFARIVIEWSERVICE_ROOT_PATHS)
|
||||
self.log.info("Extracted a total of %d records from WebKit SafariViewService WebsiteData",
|
||||
len(self.results))
|
||||
@@ -1,63 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
|
||||
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CALLS_BACKUP_IDS = [
|
||||
"5a4935c78a5255723f707230a451d79c540d2741",
|
||||
]
|
||||
CALLS_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/CallHistoryDB/CallHistory.storedata"
|
||||
]
|
||||
|
||||
|
||||
class Calls(IOSExtraction):
|
||||
"""This module extracts phone calls details"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "call",
|
||||
"data": f"From {record['number']} using {record['provider']} during {record['duration']} seconds"
|
||||
}
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=CALLS_BACKUP_IDS,
|
||||
root_paths=CALLS_ROOT_PATHS)
|
||||
self.log.info("Found Calls database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
ZDATE, ZDURATION, ZLOCATION, ZADDRESS, ZSERVICE_PROVIDER
|
||||
FROM ZCALLRECORD;
|
||||
""")
|
||||
# names = [description[0] for description in cur.description]
|
||||
|
||||
for row in cur:
|
||||
self.results.append({
|
||||
"isodate": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
|
||||
"duration": row[1],
|
||||
"location": row[2],
|
||||
"number": row[3].decode("utf-8") if row[3] and row[3] is bytes else row[3],
|
||||
"provider": row[4]
|
||||
})
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d calls", len(self.results))
|
||||
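ZCALLRECORD.ZDATE, like most Core Data timestamps handled by these modules, is stored as Mac absolute time, seconds since 2001-01-01 00:00:00 UTC, which is what convert_mactime_to_unix() accounts for. A self-contained sketch of the conversion, written as its own helper rather than the project's utility:

# Convert a Mac absolute timestamp (seconds since 2001-01-01 UTC) to a UTC
# datetime. This mirrors the intent of convert_mactime_to_unix() plus
# convert_timestamp_to_iso(), but is not the project's implementation.
from datetime import datetime, timedelta, timezone

MAC_EPOCH = datetime(2001, 1, 1, tzinfo=timezone.utc)


def mactime_to_datetime(mac_seconds: float) -> datetime:
    return MAC_EPOCH + timedelta(seconds=mac_seconds)


print(mactime_to_datetime(0))            # 2001-01-01 00:00:00+00:00
print(mactime_to_datetime(663500000.0))  # a date in early 2022
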
@@ -1,81 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
|
||||
from mvt.common.utils import (convert_chrometime_to_unix,
|
||||
convert_timestamp_to_iso)
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CHROME_HISTORY_BACKUP_IDS = [
|
||||
"faf971ce92c3ac508c018dce1bef2a8b8e9838f1",
|
||||
]
|
||||
# TODO: Confirm Chrome database path.
|
||||
CHROME_HISTORY_ROOT_PATHS = [
|
||||
"private/var/mobile/Containers/Data/Application/*/Library/Application Support/Google/Chrome/Default/History",
|
||||
]
|
||||
|
||||
|
||||
class ChromeHistory(IOSExtraction):
|
||||
"""This module extracts all Chome visits."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "visit",
|
||||
"data": f"{record['id']} - {record['url']} (visit ID: {record['visit_id']}, redirect source: {record['redirect_source']})"
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_domain(result["url"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=CHROME_HISTORY_BACKUP_IDS,
|
||||
root_paths=CHROME_HISTORY_ROOT_PATHS)
|
||||
self.log.info("Found Chrome history database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
urls.id,
|
||||
urls.url,
|
||||
visits.id,
|
||||
visits.visit_time,
|
||||
visits.from_visit
|
||||
FROM urls
|
||||
JOIN visits ON visits.url = urls.id
|
||||
ORDER BY visits.visit_time;
|
||||
""")
|
||||
|
||||
for item in cur:
|
||||
self.results.append({
|
||||
"id": item[0],
|
||||
"url": item[1],
|
||||
"visit_id": item[2],
|
||||
"timestamp": item[3],
|
||||
"isodate": convert_timestamp_to_iso(convert_chrometime_to_unix(item[3])),
|
||||
"redirect_source": item[4],
|
||||
})
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d history items", len(self.results))
|
||||
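Chrome's visits.visit_time is not Mac time: it counts microseconds since 1601-01-01 UTC, the Windows FILETIME epoch, which is what convert_chrometime_to_unix() handles. A self-contained equivalent for illustration, not the project's helper:

# Convert a Chrome/WebKit timestamp (microseconds since 1601-01-01 UTC) to a
# UTC datetime; illustrates what convert_chrometime_to_unix() accounts for.
from datetime import datetime, timedelta, timezone

CHROME_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)


def chrometime_to_datetime(chrome_usec: int) -> datetime:
    return CHROME_EPOCH + timedelta(microseconds=chrome_usec)


print(chrometime_to_datetime(13280000000000000))  # a date in late 2021
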
@@ -1,54 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
CONTACTS_BACKUP_IDS = [
|
||||
"31bb7ba8914766d4ba40d6dfb6113c8b614be442",
|
||||
]
|
||||
CONTACTS_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/AddressBook/AddressBook.sqlitedb",
|
||||
]
|
||||
|
||||
|
||||
class Contacts(IOSExtraction):
|
||||
"""This module extracts all contact details from the phone's address book."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=CONTACTS_BACKUP_IDS, root_paths=CONTACTS_ROOT_PATHS)
|
||||
self.log.info("Found Contacts database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
multi.value, person.first, person.middle, person.last,
|
||||
person.organization
|
||||
FROM ABPerson person, ABMultiValue multi
|
||||
WHERE person.rowid = multi.record_id and multi.value not null
|
||||
ORDER by person.rowid ASC;
|
||||
""")
|
||||
names = [description[0] for description in cur.description]
|
||||
|
||||
for row in cur:
|
||||
new_contact = {}
|
||||
for index, value in enumerate(row):
|
||||
new_contact[names[index]] = value
|
||||
|
||||
self.results.append(new_contact)
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d contacts from the address book",
|
||||
len(self.results))
|
||||
@@ -1,88 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
from datetime import datetime
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
FIREFOX_HISTORY_BACKUP_IDS = [
|
||||
"2e57c396a35b0d1bcbc624725002d98bd61d142b",
|
||||
]
|
||||
FIREFOX_HISTORY_ROOT_PATHS = [
|
||||
"private/var/mobile/profile.profile/browser.db",
|
||||
]
|
||||
|
||||
|
||||
class FirefoxFavicon(IOSExtraction):
|
||||
"""This module extracts all Firefox favicon"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
return {
|
||||
"timestamp": record["isodate"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "firefox_history",
|
||||
"data": f"Firefox favicon {record['url']} when visiting {record['history_url']}",
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_domain(result.get("url", ""))
|
||||
if not ioc:
|
||||
ioc = self.indicators.check_domain(result.get("history_url", ""))
|
||||
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=FIREFOX_HISTORY_BACKUP_IDS,
|
||||
root_paths=FIREFOX_HISTORY_ROOT_PATHS)
|
||||
self.log.info("Found Firefox favicon database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
cur.execute("""
|
||||
SELECT
|
||||
favicons.id,
|
||||
favicons.url,
|
||||
favicons.width,
|
||||
favicons.height,
|
||||
favicons.type,
|
||||
favicons.date,
|
||||
history.id,
|
||||
history.url
|
||||
FROM favicons
|
||||
INNER JOIN favicon_sites ON favicon_sites.faviconID = favicons.id
|
||||
INNER JOIN history ON favicon_sites.siteID = history.id;
|
||||
""")
|
||||
|
||||
for item in cur:
|
||||
self.results.append({
|
||||
"id": item[0],
|
||||
"url": item[1],
|
||||
"width": item[2],
|
||||
"height": item[3],
|
||||
"type": item[4],
|
||||
"isodate": convert_timestamp_to_iso(datetime.utcfromtimestamp(item[5])),
|
||||
"history_id": item[6],
|
||||
"history_url": item[7]
|
||||
})
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d history items", len(self.results))
|
||||
@@ -1,174 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import sqlite3
|
||||
|
||||
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
INTERACTIONC_BACKUP_IDS = [
|
||||
"1f5a521220a3ad80ebfdc196978df8e7a2e49dee",
|
||||
]
|
||||
INTERACTIONC_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/CoreDuet/People/interactionC.db",
|
||||
]
|
||||
|
||||
|
||||
class InteractionC(IOSExtraction):
|
||||
"""This module extracts data from InteractionC db."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.timestamps = [
|
||||
"start_date",
|
||||
"end_date",
|
||||
"interactions_creation_date",
|
||||
"contacts_creation_date",
|
||||
"first_incoming_recipient_date",
|
||||
"first_incoming_sender_date",
|
||||
"first_outgoing_recipient_date",
|
||||
"last_incoming_sender_date",
|
||||
"last_incoming_recipient_date",
|
||||
"last_outgoing_recipient_date",
|
||||
]
|
||||
|
||||
def serialize(self, record):
|
||||
records = []
|
||||
processed = []
|
||||
for ts in self.timestamps:
|
||||
# Check if the record has the current timestamp.
|
||||
if ts not in record or not record[ts]:
|
||||
continue
|
||||
|
||||
# Check if the timestamp was already processed.
|
||||
if record[ts] in processed:
|
||||
continue
|
||||
|
||||
records.append({
|
||||
"timestamp": record[ts],
|
||||
"module": self.__class__.__name__,
|
||||
"event": ts,
|
||||
"data": f"[{record['bundle_id']}] {record['account']} - from {record['sender_display_name']} "
|
||||
f"({record['sender_identifier']}) to {record['recipient_display_name']} "
|
||||
f"({record['recipient_identifier']}): {record['content']}"
|
||||
})
|
||||
processed.append(record[ts])
|
||||
|
||||
return records
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=INTERACTIONC_BACKUP_IDS, root_paths=INTERACTIONC_ROOT_PATHS)
|
||||
self.log.info("Found InteractionC database at path: %s", self.file_path)
|
||||
|
||||
conn = sqlite3.connect(self.file_path)
|
||||
cur = conn.cursor()
|
||||
|
||||
# TODO: Support all versions.
|
||||
# Taken from:
|
||||
# https://github.com/mac4n6/APOLLO/blob/master/modules/interaction_contact_interactions.txt
|
||||
cur.execute("""
|
||||
SELECT
|
||||
ZINTERACTIONS.ZSTARTDATE,
|
||||
ZINTERACTIONS.ZENDDATE,
|
||||
ZINTERACTIONS.ZBUNDLEID,
|
||||
ZINTERACTIONS.ZACCOUNT,
|
||||
ZINTERACTIONS.ZTARGETBUNDLEID,
|
||||
CASE ZINTERACTIONS.ZDIRECTION
|
||||
WHEN '0' THEN 'INCOMING'
|
||||
WHEN '1' THEN 'OUTGOING'
|
||||
END 'DIRECTION',
|
||||
ZCONTACTS.ZDISPLAYNAME,
|
||||
ZCONTACTS.ZIDENTIFIER,
|
||||
ZCONTACTS.ZPERSONID,
|
||||
RECEIPIENTCONACT.ZDISPLAYNAME,
|
||||
RECEIPIENTCONACT.ZIDENTIFIER,
|
||||
RECEIPIENTCONACT.ZPERSONID,
|
||||
ZINTERACTIONS.ZRECIPIENTCOUNT,
|
||||
ZINTERACTIONS.ZDOMAINIDENTIFIER,
|
||||
ZINTERACTIONS.ZISRESPONSE,
|
||||
ZATTACHMENT.ZCONTENTTEXT,
|
||||
ZATTACHMENT.ZUTI,
|
||||
ZATTACHMENT.ZCONTENTURL,
|
||||
ZATTACHMENT.ZSIZEINBYTES,
|
||||
ZATTACHMENT.ZPHOTOLOCALIDENTIFIER,
|
||||
HEX(ZATTACHMENT.ZIDENTIFIER),
|
||||
ZATTACHMENT.ZCLOUDIDENTIFIER,
|
||||
ZCONTACTS.ZINCOMINGRECIPIENTCOUNT,
|
||||
ZCONTACTS.ZINCOMINGSENDERCOUNT,
|
||||
ZCONTACTS.ZOUTGOINGRECIPIENTCOUNT,
|
||||
ZINTERACTIONS.ZCREATIONDATE,
|
||||
ZCONTACTS.ZCREATIONDATE,
|
||||
ZCONTACTS.ZFIRSTINCOMINGRECIPIENTDATE,
|
||||
ZCONTACTS.ZFIRSTINCOMINGSENDERDATE,
|
||||
ZCONTACTS.ZFIRSTOUTGOINGRECIPIENTDATE,
|
||||
ZCONTACTS.ZLASTINCOMINGSENDERDATE,
|
||||
ZCONTACTS.ZLASTINCOMINGRECIPIENTDATE,
|
||||
ZCONTACTS.ZLASTOUTGOINGRECIPIENTDATE,
|
||||
ZCONTACTS.ZCUSTOMIDENTIFIER,
|
||||
ZINTERACTIONS.ZCONTENTURL,
|
||||
ZINTERACTIONS.ZLOCATIONUUID,
|
||||
ZINTERACTIONS.ZGROUPNAME,
|
||||
ZINTERACTIONS.ZDERIVEDINTENTIDENTIFIER,
|
||||
ZINTERACTIONS.Z_PK
|
||||
FROM ZINTERACTIONS
|
||||
LEFT JOIN ZCONTACTS ON ZINTERACTIONS.ZSENDER = ZCONTACTS.Z_PK
|
||||
LEFT JOIN Z_1INTERACTIONS ON ZINTERACTIONS.Z_PK == Z_1INTERACTIONS.Z_3INTERACTIONS
|
||||
LEFT JOIN ZATTACHMENT ON Z_1INTERACTIONS.Z_1ATTACHMENTS == ZATTACHMENT.Z_PK
|
||||
LEFT JOIN Z_2INTERACTIONRECIPIENT ON ZINTERACTIONS.Z_PK== Z_2INTERACTIONRECIPIENT.Z_3INTERACTIONRECIPIENT
|
||||
LEFT JOIN ZCONTACTS RECEIPIENTCONACT ON Z_2INTERACTIONRECIPIENT.Z_2RECIPIENTS== RECEIPIENTCONACT.Z_PK;
|
||||
""")
|
||||
# names = [description[0] for description in cur.description]
|
||||
|
||||
for row in cur:
|
||||
self.results.append({
|
||||
"start_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[0])),
|
||||
"end_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[1])),
|
||||
"bundle_id": row[2],
|
||||
"account": row[3],
|
||||
"target_bundle_id": row[4],
|
||||
"direction": row[5],
|
||||
"sender_display_name": row[6],
|
||||
"sender_identifier": row[7],
|
||||
"sender_personid": row[8],
|
||||
"recipient_display_name": row[9],
|
||||
"recipient_identifier": row[10],
|
||||
"recipient_personid": row[11],
|
||||
"recipient_count": row[12],
|
||||
"domain_identifier": row[13],
|
||||
"is_response": row[14],
|
||||
"content": row[15],
|
||||
"uti": row[16],
|
||||
"content_url": row[17],
|
||||
"size": row[18],
|
||||
"photo_local_id": row[19],
|
||||
"attachment_id": row[20],
|
||||
"cloud_id": row[21],
|
||||
"incoming_recipient_count": row[22],
|
||||
"incoming_sender_count": row[23],
|
||||
"outgoing_recipient_count": row[24],
|
||||
"interactions_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[25])) if row[25] else None,
|
||||
"contacts_creation_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[26])) if row[26] else None,
|
||||
"first_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[27])) if row[27] else None,
|
||||
"first_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[28])) if row[28] else None,
|
||||
"first_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[29])) if row[29] else None,
|
||||
"last_incoming_sender_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[30])) if row[30] else None,
|
||||
"last_incoming_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[31])) if row[31] else None,
|
||||
"last_outgoing_recipient_date": convert_timestamp_to_iso(convert_mactime_to_unix(row[32])) if row[32] else None,
|
||||
"custom_id": row[33],
|
||||
"location_uuid": row[35],
|
||||
"group_name": row[36],
|
||||
"derivied_intent_id": row[37],
|
||||
"table_id": row[38]
|
||||
})
|
||||
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
self.log.info("Extracted a total of %d InteractionC events", len(self.results))
|
||||
@@ -1,123 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import plistlib
|
||||
|
||||
from mvt.common.utils import convert_mactime_to_unix, convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
LOCATIOND_BACKUP_IDS = [
|
||||
"a690d7769cce8904ca2b67320b107c8fe5f79412",
|
||||
]
|
||||
LOCATIOND_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Caches/locationd/clients.plist",
|
||||
"private/var/root/Library/Caches/locationd/clients.plist"
|
||||
]
|
||||
|
||||
|
||||
class LocationdClients(IOSExtraction):
|
||||
"""Extract information from apps who used geolocation."""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
self.timestamps = [
|
||||
"ConsumptionPeriodBegin",
|
||||
"ReceivingLocationInformationTimeStopped",
|
||||
"VisitTimeStopped",
|
||||
"LocationTimeStopped",
|
||||
"BackgroundLocationTimeStopped",
|
||||
"SignificantTimeStopped",
|
||||
"NonPersistentSignificantTimeStopped",
|
||||
"FenceTimeStopped",
|
||||
"BeaconRegionTimeStopped",
|
||||
]
|
||||
|
||||
def serialize(self, record):
|
||||
records = []
|
||||
for timestamp in self.timestamps:
|
||||
if timestamp in record.keys():
|
||||
records.append({
|
||||
"timestamp": record[timestamp],
|
||||
"module": self.__class__.__name__,
|
||||
"event": timestamp,
|
||||
"data": f"{timestamp} from {record['package']}"
|
||||
})
|
||||
|
||||
return records
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
parts = result["package"].split("/")
|
||||
proc_name = parts[len(parts)-1]
|
||||
|
||||
ioc = self.indicators.check_process(proc_name)
|
||||
if ioc:
|
||||
self.log.warning("Found a suspicious process name in LocationD entry %s",
|
||||
result["package"])
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if "BundlePath" in result:
|
||||
ioc = self.indicators.check_file_path(result["BundlePath"])
|
||||
if ioc:
|
||||
self.log.warning("Found a suspicious file path in Location D: %s",
|
||||
result["BundlePath"])
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if "Executable" in result:
|
||||
ioc = self.indicators.check_file_path(result["Executable"])
|
||||
if ioc:
|
||||
self.log.warning("Found a suspicious file path in Location D: %s",
|
||||
result["Executable"])
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
if "Registered" in result:
|
||||
ioc = self.indicators.check_file_path(result["Registered"])
|
||||
if ioc:
|
||||
self.log.warning("Found a suspicious file path in Location D: %s",
|
||||
result["Registered"])
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
continue
|
||||
|
||||
def _extract_locationd_entries(self, file_path):
|
||||
with open(file_path, "rb") as handle:
|
||||
file_plist = plistlib.load(handle)
|
||||
|
||||
for key, values in file_plist.items():
|
||||
result = file_plist[key]
|
||||
result["package"] = key
|
||||
for ts in self.timestamps:
|
||||
if ts in result.keys():
|
||||
result[ts] = convert_timestamp_to_iso(convert_mactime_to_unix(result[ts]))
|
||||
|
||||
self.results.append(result)
|
||||
|
||||
def run(self):
|
||||
|
||||
if self.is_backup:
|
||||
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS)
|
||||
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
|
||||
self._extract_locationd_entries(self.file_path)
|
||||
elif self.is_fs_dump:
|
||||
for locationd_path in self._get_fs_files_from_patterns(LOCATIOND_ROOT_PATHS):
|
||||
self.file_path = locationd_path
|
||||
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
|
||||
self._extract_locationd_entries(self.file_path)
|
||||
|
||||
self.log.info("Extracted a total of %d Locationd Clients entries", len(self.results))
|
||||
@@ -1,67 +0,0 @@
|
||||
# Mobile Verification Toolkit (MVT)
|
||||
# Copyright (c) 2021-2022 The MVT Project Authors.
|
||||
# Use of this software is governed by the MVT License 1.1 that can be found at
|
||||
# https://license.mvt.re/1.1/
|
||||
|
||||
import plistlib
|
||||
|
||||
from mvt.common.utils import convert_timestamp_to_iso
|
||||
|
||||
from ..base import IOSExtraction
|
||||
|
||||
OSANALYTICS_ADDAILY_BACKUP_IDS = [
|
||||
"f65b5fafc69bbd3c60be019c6e938e146825fa83",
|
||||
]
|
||||
OSANALYTICS_ADDAILY_ROOT_PATHS = [
|
||||
"private/var/mobile/Library/Preferences/com.apple.osanalytics.addaily.plist",
|
||||
]
|
||||
|
||||
|
||||
class OSAnalyticsADDaily(IOSExtraction):
|
||||
"""Extract network usage information by process, from com.apple.osanalytics.addaily.plist"""
|
||||
|
||||
def __init__(self, file_path=None, base_folder=None, output_folder=None,
|
||||
fast_mode=False, log=None, results=[]):
|
||||
super().__init__(file_path=file_path, base_folder=base_folder,
|
||||
output_folder=output_folder, fast_mode=fast_mode,
|
||||
log=log, results=results)
|
||||
|
||||
def serialize(self, record):
|
||||
record_data = f"{record['package']} WIFI IN: {record['wifi_in']}, WIFI OUT: {record['wifi_out']} - " \
|
||||
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
|
||||
return {
|
||||
"timestamp": record["ts"],
|
||||
"module": self.__class__.__name__,
|
||||
"event": "osanalytics_addaily",
|
||||
"data": record_data,
|
||||
}
|
||||
|
||||
def check_indicators(self):
|
||||
if not self.indicators:
|
||||
return
|
||||
|
||||
for result in self.results:
|
||||
ioc = self.indicators.check_process(result["package"])
|
||||
if ioc:
|
||||
result["matched_indicator"] = ioc
|
||||
self.detected.append(result)
|
||||
|
||||
def run(self):
|
||||
self._find_ios_database(backup_ids=OSANALYTICS_ADDAILY_BACKUP_IDS,
|
||||
root_paths=OSANALYTICS_ADDAILY_ROOT_PATHS)
|
||||
self.log.info("Found com.apple.osanalytics.addaily plist at path: %s", self.file_path)
|
||||
|
||||
with open(self.file_path, "rb") as handle:
|
||||
file_plist = plistlib.load(handle)
|
||||
|
||||
for app, values in file_plist.get("netUsageBaseline", {}).items():
|
||||
self.results.append({
|
||||
"package": app,
|
||||
"ts": convert_timestamp_to_iso(values[0]),
|
||||
"wifi_in": values[1],
|
||||
"wifi_out": values[2],
|
||||
"wwan_in": values[3],
|
||||
"wwan_out": values[4],
|
||||
})
|
||||
|
||||
self.log.info("Extracted a total of %d com.apple.osanalytics.addaily entries", len(self.results))
|
||||
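The netUsageBaseline dictionary read by run() maps a bundle or process name to a five-element list: a timestamp followed by Wi-Fi in/out and WWAN in/out byte counts, which is how values[0] through values[4] are unpacked above. A synthetic illustration; the entry name and byte counts are invented for the example:

# Synthetic netUsageBaseline-shaped data, unpacked the same way as run() does.
from datetime import datetime, timezone

net_usage_baseline = {
    "com.example.app": [datetime(2022, 1, 1, tzinfo=timezone.utc),
                        1000000.0, 250000.0, 40000.0, 5000.0],
}

for package, values in net_usage_baseline.items():
    print({
        "package": package,
        "ts": values[0].isoformat(),
        "wifi_in": values[1],
        "wifi_out": values[2],
        "wwan_in": values[3],
        "wwan_out": values[4],
    })
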