-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdecrypt_signal.py
1391 lines (1188 loc) · 52.4 KB
/
decrypt_signal.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import argparse
import pathlib
import os
import json
import base64
import uuid
import random
import string
import mimetypes
import sys
import csv
from datetime import datetime
import pytz
from collections import defaultdict
import sqlcipher3
from modules import shared_utils as su
from modules.shared_utils import bytes_to_hex, log, MalformedKeyError, mime_to_extension
from modules.crypto import aes_256_gcm_decrypt, aes_256_cbc_decrypt, hash_sha256
from modules.htmlreport import generate_html_report
####################### CONSTANTS #######################
VERSION = "1.0"
# Prefix prepended to the DPAPI-wrapped auxiliary key stored in "Local State"
# (stripped before the blob is handed to DPAPI — see fetch_aux_key).
AUX_KEY_PREFIX = "DPAPI"
# Version prefix of the wrapped SQLCipher key stored in config.json
# (stripped before unwrapping — see fetch_decryption_key).
DEC_KEY_PREFIX = "v10"
# DPAPI provider GUID; every DPAPI blob carries it (little-endian) at bytes 4..20.
DPAPI_BLOB_GUID = uuid.UUID("df9d8cd0-1501-11d1-8c7a-00c04fc297eb")
EMPTY_IV = "AAAAAAAAAAAAAAAAAAAAAA=="  # base64 of 16 bytes of 0x00
# Subfolders of Signal's profile directory holding the encrypted media files.
ATTACHMENT_FOLDER = pathlib.Path("attachments.noindex")
AVATARS_FOLDER = pathlib.Path("avatars.noindex")
DRAFTS_FOLDER = pathlib.Path("drafts.noindex")
####################### EXCEPTIONS #######################
class MalformedInputFileError(Exception):
    """Raised when an input file (config.json / Local State) cannot be parsed."""
####################### I/O ARGS #######################
# Parse command line arguments
def parse_args():
    """Build the command line interface and parse ``sys.argv``.

    Returns:
        argparse.Namespace: the parsed arguments; ``mode`` is normalized to
        one of 'auto', 'aux', 'key' or 'manual'.
    """
    parser = argparse.ArgumentParser(
        prog="SignalDecryptor",
        description="Decrypts the forensic artifacts from Signal Desktop on Windows",
        usage="""%(prog)s [-m auto] -d <signal_dir> [-o <output_dir>] [OPTIONS]
%(prog)s -m aux -d <signal_dir> [-o <output_dir>] [-kf <file> | -k <HEX>] [OPTIONS]
%(prog)s -m key -d <signal_dir> -o <output_dir> [-kf <file> | -k <HEX>] [OPTIONS]
%(prog)s -m manual -d <signal_dir> [-o <output_dir>] -wS <SID> -wP <password> [OPTIONS]
""",
    )
    # Informational arguments
    parser.add_argument(
        "-V",
        "--version",
        help="Print the version of the script",
        action="version",
        version=f"%(prog)s {VERSION}",
    )

    # Normalize the -m/--mode value, accepting short aliases (case-insensitive).
    def parse_mode(value):
        aliases = {
            "auto": "auto",
            "manual": "manual",
            "aux": "aux",
            "key": "key",
            "a": "auto",
            "m": "manual",
            "ak": "aux",
            "dk": "key",
        }
        normalized_value = value.lower()
        if normalized_value not in aliases:
            raise argparse.ArgumentTypeError(f"Invalid mode '{value}'. Valid choices are: {', '.join(aliases.keys())}")
        return aliases[normalized_value]

    # Convert a HEX string (embedded spaces allowed) to bytes.
    def hex_to_bytes(value):
        value = value.replace(" ", "").lower()
        try:
            return bytes.fromhex(value)
        except ValueError:
            raise argparse.ArgumentTypeError(f"Invalid HEX string: {value}")

    # Define mode argument
    parser.add_argument(
        "-m",
        "--mode",
        help=(
            "Mode of execution (choices: 'auto' for Windows Automatic, 'aux' for Auxiliary Key Provided, "
            "'key' for Decryption Key Provided), 'manual' for Windows Manual. "
            # BUG FIX: added the missing ". " separator before "Default:"
            "Short aliases: -mA (Auto), -mAK (Auxiliary Key), -mDK (Decryption Key), -mM (Manual). "
            "Default: auto"
        ),
        type=parse_mode,
        # choices are checked AFTER type conversion, so the short aliases still pass
        choices=["auto", "aux", "key", "manual"],
        metavar="{auto|aux|key|manual}",
        default="auto",
    )
    # IO arguments
    io_group = parser.add_argument_group(
        "Input/Output",
        "Arguments related to input/output paths. Output directory and either Signal's directory or configuration and local state files are required.",
    )
    io_group.add_argument(
        "-d", "--dir", help="Path to Signal's Roaming directory", type=pathlib.Path, metavar="<dir>", required=True
    )  # TODO: Change Roaming to other stuff
    io_group.add_argument(
        "-o",
        "--output",
        help="Path to the output directory",
        type=pathlib.Path,
        metavar="<dir>",
    )
    # Provided key related arguments (mutually exclusive: file or inline HEX)
    key_group = parser.add_argument_group(
        "Key Provided Modes", "Arguments available for both Key Provided modes."
    ).add_mutually_exclusive_group()
    key_group.add_argument(
        "-kf",
        "--key-file",
        help="Path to the file containing the HEX encoded key as a string",
        type=pathlib.Path,
        metavar="<file>",
    )
    key_group.add_argument("-k", "--key", help="Key in HEX format", type=hex_to_bytes, metavar="<HEX>")
    # Windows manual mode arguments.
    # BUG FIX: these options were commented out although the usage string
    # advertises them and validate_args() requires args.windows_sid /
    # args.windows_password for manual mode (it raised AttributeError before).
    manual_group = parser.add_argument_group("Windows Manual Mode", "Arguments required for manual mode.")
    manual_group.add_argument("-wS", "--windows-sid", help="Target windows user's SID", metavar="<SID>")
    manual_group.add_argument("-wP", "--windows-password", help="Target windows user's password", metavar="<password>")
    # Operational/Options arguments
    parser.add_argument(
        # BUG FIX: help typo "decription" -> "decryption"
        "-nd", "--no-decryption", help="No decryption, just print the SQLCipher key", action="store_true"
    )
    parser.add_argument(
        "-sD", "--skip-database", help="Skip exporting a decrypted copy of the database", action="store_true"
    )
    parser.add_argument("-sA", "--skip-attachments", help="Skip attachment decryption", action="store_true")
    parser.add_argument(
        "-sR", "--skip-reports", help="Skip the generation of CSV and HTML reports", action="store_true"
    )

    # Reject timezones that pytz does not know about.
    def validate_timezone(value):
        if value not in pytz.all_timezones:
            raise argparse.ArgumentTypeError(
                f"Invalid timezone: {value}. Please provide a valid timezone (e.g., UTC, GMT, PST, Europe/Lisbon)."
            )
        return value

    parser.add_argument(
        "-t",
        "--convert-timestamps",
        nargs="?",
        const="UTC",  # "-t" with no value means UTC
        default=None,
        type=validate_timezone,
        metavar="[timezone]",
        help="Convert timestamps to human-readable format. Provide a timezone (e.g., UTC, GMT, PST). Defaults to UTC when no timezone is provided.",
    )
    parser.add_argument(
        "-mc",
        "--merge-conversations",
        help="Merge message related reports into single CSV files instead of separating them by conversation",
        action="store_true",
    )
    # Verbosity arguments
    verbosity_group = parser.add_mutually_exclusive_group()
    verbosity_group.add_argument("-v", "--verbose", help="Enable verbose output", action="count", default=0)
    verbosity_group.add_argument("-q", "--quiet", help="Enable quiet output", action="store_true")
    # Parse arguments
    return parser.parse_args()
# Validate arguments
def validate_args(args: argparse.Namespace):
    """Validate the parsed CLI arguments and derive dependent paths.

    Side effects on ``args``:
        - sets ``args.config`` and ``args.local_state`` from ``args.dir``
        - forces ``args.no_decryption`` True when no output directory is given
        - creates the output directory when it does not exist

    Raises:
        FileNotFoundError: Signal dir/config/Local State/key file missing, or
            the output directory could not be created.
        OSError: auto mode requested on a non-Windows platform.
        ValueError: missing or conflicting mode-specific arguments.
    """
    # Validate Signal directory and derive the input file paths from it
    if not args.dir.is_dir():
        raise FileNotFoundError(f"Signal directory '{args.dir}' does not exist or is not a directory.")
    args.config = args.dir / "config.json"
    args.local_state = args.dir / "Local State"
    # Check for Signal's configuration file
    if not args.config.is_file():
        raise FileNotFoundError(f"Signal's configuration file '{args.config}' does not exist or is not a file.")
    # Check for Signal's local state file
    if not args.local_state.is_file():
        raise FileNotFoundError(f"Signal's local state file '{args.local_state}' does not exist or is not a file.")
    # Validate output directory
    if not args.output:
        if not args.no_decryption:
            log("[!] No output directory provided, assuming no decryption is required")
            args.no_decryption = True
    elif not args.output.is_dir():
        try:
            os.makedirs(args.output)
        except OSError as e:
            raise FileNotFoundError(f"Output directory '{args.output}' does not exist and could not be created.") from e
    # Validate auto mode (DPAPI is only available on Windows)
    if args.mode == "auto":
        if not sys.platform.startswith("win"):
            raise OSError("Automatic mode is only available on Windows.")
    # Validate manual mode arguments.
    # getattr: be robust when the -wS/-wP options are not defined on the parser,
    # so a missing value raises a clean ValueError instead of AttributeError.
    if args.mode == "manual":
        if not getattr(args, "windows_sid", None):
            raise ValueError("Windows User SID is required for manual mode.")
        if not getattr(args, "windows_password", None):
            raise ValueError("Windows User Password is required for manual mode.")
    # Validate key provided mode arguments
    if args.mode in ["aux", "key"]:
        if args.key_file:
            if not args.key_file.is_file():
                raise FileNotFoundError(f"Key file '{args.key_file}' does not exist or is not a file.")
        elif not args.key:
            raise ValueError("A key is required for Key Provided modes.")
    # If mode is Key Provided and decryption is skipped, raise an error.
    # BUG FIX: was ``args.skip_decryption`` (nonexistent attribute, AttributeError);
    # the -nd/--no-decryption flag is stored as ``no_decryption``.
    if args.mode == "key" and args.no_decryption:
        raise ValueError("Decryption cannot be skipped when providing the decryption key.")
####################### KEY FETCHING #######################
def fetch_key_from_args(args: argparse.Namespace):
    """Return the user-supplied key: read from -kf/--key-file when given, else -k/--key."""
    if not args.key_file:
        return args.key
    log("Reading the key from the file...", 2)
    with args.key_file.open("r") as key_file:
        return bytes.fromhex(key_file.read().strip())
def fetch_aux_key(args: argparse.Namespace):
    """Obtain the auxiliary key used to unwrap the database decryption key.

    Behavior by ``args.mode``:
      - "aux":    the key was supplied directly (command line or key file).
      - "auto":   unprotect the "Local State" blob via the current user's DPAPI.
      - "manual": unprotect it offline using the target user's SID and password.

    Returns:
        bytes | None: the auxiliary key, or None for any other mode.

    Raises:
        MalformedInputFileError: Local State is not valid JSON or lacks the key.
        MalformedKeyError: the stored key is not valid base64 or not a DPAPI blob.
    """
    # If the user provided the auxiliary key, return it
    if args.mode == "aux":
        return fetch_key_from_args(args)
    else:
        with args.local_state.open("r") as f:
            try:
                data = json.load(f)
            except json.JSONDecodeError:
                raise MalformedInputFileError("The Local State file was malformed: Invalid JSON structure.")
        # Validate the presence of "os_crypt" and "encrypted_key"
        encrypted_key = data.get("os_crypt", {}).get("encrypted_key")
        if not encrypted_key:
            raise MalformedInputFileError(
                "The Local State file was malformed: Missing the encrypted auxiliary key."
            )
        # Decode the base64 encoded key and remove the "DPAPI" prefix
        try:
            encrypted_key = base64.b64decode(encrypted_key)[len(AUX_KEY_PREFIX) :]
        except ValueError:
            raise MalformedKeyError("The encrypted key is not a valid base64 string.")
        except IndexError:
            # NOTE(review): slicing bytes never raises IndexError — this handler
            # appears to be dead code; confirm and consider removing.
            raise MalformedKeyError("The encrypted key is malformed.")
        # Check if this is a DPAPI blob: bytes 4..20 must hold the provider GUID
        # in little-endian byte order.
        if encrypted_key[4:20] != DPAPI_BLOB_GUID.bytes_le:
            raise MalformedKeyError("The encrypted auxiliary key is not in the expected DPAPI BLOB format.")
        if args.mode == "auto":
            # Imported lazily: this module is only importable on Windows
            try:
                from modules import windows as win
            except ImportError as e:
                raise ImportError("Windows-specific module could not be imported:", e)
            return win.unprotect_with_dpapi(encrypted_key)
        elif args.mode == "manual":
            from modules import manual as manual_mode  # REVIEW: Should this be imported here or at the top?
            return manual_mode.unprotect_manually(encrypted_key, args.windows_sid, args.windows_password)
    # Unreachable for the four documented modes; defensive fallback
    return None
def fetch_decryption_key(args: argparse.Namespace, aux_key: bytes):
    """Read the wrapped SQLCipher key from config.json and unwrap it.

    The stored value is hex("v10" + nonce(12) + ciphertext + gcm_tag(16));
    the AES-256-GCM plaintext is itself a hex string encoding the key bytes.

    Raises:
        MalformedInputFileError: the configuration file is not valid JSON or
            lacks the "encryptedKey" entry.
        MalformedKeyError: the stored value is not hex or lacks the prefix.
    """
    with args.config.open("r") as config_file:
        try:
            config_data = json.load(config_file)
        except json.JSONDecodeError:
            raise MalformedInputFileError("The Configuration file was malformed: Invalid JSON structure.")
    # The wrapped key must be present in the configuration
    wrapped_hex = config_data.get("encryptedKey")
    if not wrapped_hex:
        raise MalformedInputFileError("The Configuration file was malformed: Missing the encrypted decryption key.")
    # Import the hex string into bytes
    try:
        blob = bytes.fromhex(wrapped_hex)
    except ValueError:
        raise MalformedKeyError("The encrypted decryption key is not a valid HEX string.")
    # The blob must start with the expected version prefix
    prefix = DEC_KEY_PREFIX.encode("utf-8")
    if blob[: len(prefix)] != prefix:
        raise MalformedKeyError("The encrypted decryption key does not start with the expected prefix.")
    blob = blob[len(prefix) :]
    log("Processing the encrypted decryption key...", 2)
    # Layout: nonce (12 bytes) | ciphertext | GCM tag (16 bytes)
    nonce, ciphertext, gcm_tag = blob[:12], blob[12:-16], blob[-16:]
    log(f"> Nonce: {bytes_to_hex(nonce)}", 3)
    log(f"> GCM Tag: {bytes_to_hex(gcm_tag)}", 3)
    log(f"> Key: {bytes_to_hex(ciphertext)}", 3)
    log("Decrypting the decryption key...", 2)
    plaintext = aes_256_gcm_decrypt(aux_key, nonce, ciphertext, gcm_tag)
    # The decrypted payload is an ASCII hex string of the actual key
    return bytes.fromhex(plaintext.decode("utf-8"))
####################### SQLCIPHER & DATABASE #######################
def open_sqlcipher_db(args: argparse.Namespace, key: bytes):
    """Open Signal's SQLCipher database and optionally export a plaintext copy.

    Args:
        args: parsed CLI arguments (uses ``dir``, ``output``, ``skip_database``).
        key: raw SQLCipher key bytes.

    Returns:
        tuple: (connection, cursor) for the opened database.

    Raises:
        FileNotFoundError: the encrypted database file is missing.
        sqlcipher3.DatabaseError: the key failed to decrypt the database.
    """
    db_path = args.dir / "sql" / "db.sqlite"
    cipher_key = bytes_to_hex(key)
    if not db_path.is_file():
        raise FileNotFoundError(f"Encrypted database '{db_path}' does not exist or is not a file.")
    # Connect to the database
    conn = sqlcipher3.connect(db_path)
    cursor = conn.cursor()
    # Provide the key to SQLCipher; the x'...' form passes the raw key bytes
    statement = f"PRAGMA key = \"x'{cipher_key}'\""
    log(f"Executing: {statement}", 3)
    cursor.execute(statement)
    # Test if the decryption key is correct by touching the schema
    try:
        log("Trying SQLCipher key...", 2)
        cursor.execute("SELECT name FROM sqlite_master WHERE type='table';").fetchall()
    except sqlcipher3.DatabaseError:
        raise sqlcipher3.DatabaseError("Failed to open the database.")
    # Export a decrypted copy of the database
    if not args.skip_database:
        unencrypted_db_path = args.output / "db.sqlite"
        if unencrypted_db_path.is_file():
            log("[!] The output directory already contains an SQLite DB, skipping export")
        else:
            udb_name = generate_db_name()
            # KEY '' attaches the copy as plain (unencrypted) SQLite
            cursor.execute(f"ATTACH DATABASE '{unencrypted_db_path}' AS {udb_name} KEY '';")
            cursor.execute(f"SELECT sqlcipher_export('{udb_name}');")
            cursor.execute(f"DETACH DATABASE {udb_name};")
            # BUG FIX: message typo "unencryted" and needless f-string prefix
            log("[i] Exported the unencrypted database")
    return conn, cursor
def select_sql(cursor, statement, name=None):
    """Execute a SELECT statement and return all rows.

    Args:
        cursor: an open database cursor.
        statement: the SELECT statement to execute.
        name: optional human-readable name of the fetched entities; when given,
            progress is reported through ``log``.

    Returns:
        list: all fetched rows.

    Raises:
        sqlcipher3.DatabaseError: the statement failed to execute.
    """
    if name is not None:
        log(f"Fetching all {name}...", 2)
    try:
        cursor.execute(statement)
        arr = cursor.fetchall()
    except sqlcipher3.DatabaseError as e:
        # BUG FIX: the message was passed as multiple positional args, which
        # rendered as a tuple instead of one readable string.
        raise sqlcipher3.DatabaseError(f"Failed to execute SQL SELECT ({statement})") from e
    if name is not None:
        log(f"Found {len(arr)} {name}", 1)
    return arr
def fetch_batches_select(cursor, statement, batch_size=10000):
    """Yield the rows of ``statement`` in batches using LIMIT/OFFSET paging.

    A final short (possibly empty) batch signals the end of the result set.
    """
    offset = 0
    while True:
        cursor.execute(f"{statement} LIMIT {batch_size} OFFSET {offset}")
        batch = cursor.fetchall()
        yield batch
        # A short batch means the result set is exhausted
        if len(batch) < batch_size:
            return
        offset += batch_size
def handle_avatar(convJson, convType):
    """Yield the avatar entries of a conversation, normalizing each for decryption.

    The current avatar ("avatar" for groups, "profileAvatar" otherwise) is
    yielded first, followed by any historical entries under "avatars". Each
    yielded dict is mutated in place: keyed avatars get the all-zero IV, and
    "path_pref"/"path" are set from "imagePath" when present.
    """
    primary_key = "avatar" if convType == "group" else "profileAvatar"
    current = convJson.get(primary_key, None)
    avatar_list = convJson.get("avatars", [])
    if current is not None:
        # Put the current avatar first (mutates the conversation's list in place)
        avatar_list.insert(0, current)
    for entry in avatar_list:
        if entry.get("localKey", None) is not None:
            entry["iv"] = EMPTY_IV
        image_path = entry.get("imagePath", None)
        entry["path_pref"] = AVATARS_FOLDER if image_path else ATTACHMENT_FOLDER
        if image_path is not None:
            entry["path"] = image_path
        yield entry
def process_attachment(args: argparse.Namespace, attachments_dir, attachment, statuses):
    """Decrypt one attachment described by its metadata dict and write it to disk.

    Args:
        args: parsed CLI arguments; ``args.dir`` is Signal's profile directory.
        attachments_dir: root output directory for the decrypted files.
        attachment: attachment metadata from the message/conversation JSON
            (keys used: "path", "localKey", "iv", "size", "contentType",
            "plaintextHash", optional "path_pref").
        statuses: counters dict mutated in place ("exported", "error",
            "integrity_error").
    """
    # Story placeholders carry no exportable media
    if attachment.get("contentType", "") == "text/x-signal-story":
        return
    subpath = attachment["path"]
    try:
        # Fetch attachment crypto data: the AES key is the first 32 bytes of
        # the base64-decoded local key
        key = base64.b64decode(attachment["localKey"])[:32]
        if "iv" not in attachment:
            # If the IV is not present in the attachment, use the empty IV
            attachment["iv"] = EMPTY_IV
        nonce = base64.b64decode(attachment["iv"])
        size = int(attachment["size"])
        # Encrypted attachment path; callers may override the source folder
        # (avatars/drafts) through "path_pref"
        folder = ATTACHMENT_FOLDER if "path_pref" not in attachment else attachment["path_pref"]
        enc_attachment_path = args.dir / folder / subpath
        # Check if the encrypted attachment is present on the expected path
        if not enc_attachment_path.is_file():
            log(f"[!] Attachment {subpath} not found", 2)
            statuses["error"] += 1
            return
        # Fetch attachment cipherdata
        with enc_attachment_path.open("rb") as f:
            enc_attachment_data = f.read()
        # Decrypt the attachment
        attachment_data = aes_256_cbc_decrypt(key, nonce, enc_attachment_data)
        attachment_data = attachment_data[16 : 16 + size]  # Dismiss the first 16 bytes and the padding
        # NOTE(review): on an integrity mismatch the file is still exported
        # below and counted as "exported" — only the counter records the
        # failure; confirm this best-effort behavior is intentional.
        if bytes.fromhex(attachment["plaintextHash"]) != hash_sha256(attachment_data):
            log(f"[!] Attachment {subpath} failed integrity check", 2)
            statuses["integrity_error"] += 1
        # Save the attachment to a file, appending an extension from the MIME type
        filePath = subpath
        if "contentType" in attachment:
            filePath += f"{mime_to_extension(attachment['contentType'])}"
        # Ensure the parent directory exists
        attachment_path = attachments_dir / folder / filePath
        attachment_path.parent.mkdir(parents=True, exist_ok=True)
        with attachment_path.open("wb") as f:
            f.write(attachment_data)
        statuses["exported"] += 1
    except Exception as e:
        # Best-effort: any failure (missing keys, bad base64, I/O) is counted and logged
        statuses["error"] += 1
        log(f"[!] Failed to export attachment {subpath}: {e}", 3)
def export_attachments(cursor, args: argparse.Namespace):
    """Export Signal attachments from the database.

    Covers message attachments, URL-preview images, conversation avatars and
    draft attachments. Decrypted files are written under ``args.output`` and
    per-status counters are reported through ``log``.
    """
    attachments_dir = args.output
    statuses = {
        "error": 0,
        "exported": 0,
        "integrity_error": 0,
    }
    log("[i] Processing metadata and decrypting attachments...", 2)
    # Fetch and process message attachments in batches to bound memory use
    for msg_batch in fetch_batches_select(
        cursor,
        "SELECT json from messages WHERE hasFileAttachments = TRUE OR hasAttachments = TRUE OR json LIKE '%\"preview\":[{%'",
        500,
    ):
        it_attachments = []
        for entry in msg_batch:
            # Parse the message metadata
            msgJson = json.loads(entry[0])
            attachments = msgJson.get("attachments", [])
            # Preview images of embedded URLs.
            # BUG FIX: "preview" is a list of objects (see the LIKE pattern in
            # the SELECT above); the old code indexed it like a dict, so
            # preview images were never exported.
            preview = msgJson.get("preview", [])
            if isinstance(preview, dict):
                preview = [preview]  # defensive: tolerate a single preview object
            for prev in preview:
                if isinstance(prev, dict) and "image" in prev:
                    attachments.append(prev["image"])
            it_attachments.extend(attachments)
        for attachment in it_attachments:
            process_attachment(args, attachments_dir, attachment, statuses)
        del it_attachments
    # Fetch conversation avatars and draft attachments
    conversations = select_sql(
        cursor,
        "SELECT json, type FROM conversations;",
        "conversations",
    )
    if len(conversations) == 0:
        log("[i] No conversations were found in the database")
    else:
        withAvatar = 0
        draftAttachments = 0
        it_attachments = []
        for conv in conversations:
            convJsonStr, convType = conv
            convJson = json.loads(convJsonStr)
            for avatar in handle_avatar(convJson, convType):
                avatar["contentType"] = "image/jpeg"
                it_attachments.append(avatar)
                withAvatar += 1
            for atch in convJson.get("draftAttachments", []):
                # Drafts live under their own folder and use the all-zero IV
                atch["iv"] = EMPTY_IV
                atch["path_pref"] = DRAFTS_FOLDER
                it_attachments.append(atch)
                draftAttachments += 1
        del conversations
        log(f"[i] Found {withAvatar} conversation avatars", 2)
        log(f"[i] Found {draftAttachments} draft attachments", 2)
        for attachment in it_attachments:
            process_attachment(args, attachments_dir, attachment, statuses)
    # Final summary
    log(f"[i] Exported {statuses['exported']} attachments")
    if statuses["integrity_error"] > 0:
        log(f"[!] {statuses['integrity_error']} attachments failed integrity check")
    if statuses["error"] > 0:
        log(f"[!] Failed to export {statuses['error']} attachments")
####################### CSV/HTML REPORTS #######################
def write_csv_file(path, headers, rows):
    """Append ``rows`` to a CSV file at ``path``, creating it on first write.

    A "SEP=," hint line and the header row are written only when this call
    creates the file. Returns True on success (and when ``rows`` is empty),
    False when writing failed.
    """
    if not rows:
        return True
    try:
        is_new_file = not path.is_file()
        with open(path, "a", newline="", encoding="utf-8") as out:
            csv_writer = csv.writer(out, delimiter=",")
            if is_new_file:
                # Separator hint for spreadsheet apps, then the header row
                out.write("SEP=,\n")
                csv_writer.writerow(headers)
            csv_writer.writerows(rows)
        return True
    except Exception as e:
        log(f"[!] Failed to write CSV file: {e}")
        return False
def process_database_and_write_reports(cursor, args: argparse.Namespace):
"""Write reports from the artifacts found in the database"""
reports_folder = args.output / "reports"
reports_folder.mkdir(parents=True, exist_ok=True)
log("[i] Processing the database...", 1)
# Fetch the user's service ID
user_uuid = None
try:
cursor.execute("SELECT json FROM items WHERE id = 'uuid_id';")
row = cursor.fetchone()
if row is None:
log(f"[i] User's Service ID not found in items table", 1)
user_uuid = json.loads(row[0]).get("value", None)
if user_uuid is not None:
user_uuid = user_uuid.split(".")[0]
except sqlcipher3.DatabaseError as e:
raise sqlcipher3.DatabaseError("Failed to retrieve items from database") from e
# Fetch conversations
conversations = select_sql(
cursor,
"SELECT id, json, type, active_at, serviceId, profileFullName, e164 FROM conversations;",
"conversations",
)
if len(conversations) == 0:
log("[i] No conversations were found in the database")
return
# Timestamp managing function
def tts(timestamp, ms=True):
if timestamp is None:
return None
if timestamp == 9007199254740991:
return None
return localize_timestamp(timestamp, args, ms)
# Create CSV headers and row arrays
CONVERSATIONS_HEADERS = [
"ID",
"Type",
"Name",
"Last Active At",
"Unread Messages",
"Total Message Count",
"Sent Message Count",
"Last Message Timestamp",
"Last Message Author",
"Last Message",
"Last Message Deleted?",
"Draft Timestamp",
"Draft Message",
"Draft Attachments",
"Expire Timer (seconds)",
"Is Archived?",
"Avatar Path",
"Added To Group By (ID)",
"Added To Group By (Name)",
"Group Description",
]
CONTACTS_HEADERS = ["Conversation ID", "Service ID", "Name", "E164", "Username", "Profile Name", "Nickname", "Note"]
GROUPS_MEMBERS_HEADERS = [
"Conversation ID",
"Group Name",
"Group ID",
"Type",
"Member Service ID",
"Member Name",
"Role",
]
conv_rows = []
contacts_rows = []
group_members_rows = []
service2name = {} # Dictionary of service ID to contact name
group2name = {} # Dictionary of group ID to group name
conv2service = {} # Dictionary of conversation ID to service ID
conv2group = {} # Dictionary of conversation ID to group ID
convId2conv = {} # Dictionary of conversation ID to name and type
# Auxiliary functions
def print_mentions_in_message(text, bodyRanges):
"""Prints mentions in the message."""
if bodyRanges is None or text is None or type(bodyRanges) is not list:
return text
# Only include mentions, remove other bodyRanges
bodyRanges = list(filter(lambda x: "mentionAci" in x, bodyRanges))
if len(bodyRanges) == 0:
return text
newText = ""
j = 0
for i in range(len(bodyRanges)):
mention = bodyRanges[i]
if "mentionAci" not in mention:
continue
# Text before the mentio
newText += text[j : mention["start"]]
j += mention["start"] + mention["length"] # Skip the mention's representation
newText += "@" + mention.get("replacementText", service2name.get(mention["mentionAci"], "unknown"))
return newText
def process_message_bodyranges(
msgJson,
body=None,
keyBodyRanges="bodyRanges",
keyBody="body",
):
"""Process a message's body ranges."""
msgBodyRanges = msgJson.get(keyBodyRanges, [])
msgBody = body if body != None else msgJson.get(keyBody, None)
if len(msgBodyRanges) > 0:
msgBody = print_mentions_in_message(msgBody, msgBodyRanges)
return msgBody
def process_last_message(convJson):
"""Process the last message in a conversation."""
last_message = print_mentions_in_message(
convJson.get("lastMessage", None), convJson.get("lastMessageBodyRanges", None)
)
prefix = convJson.get("lastMessagePrefix", "")
if prefix:
last_message = f"{prefix} {last_message}"
return last_message
def process_group_members(convId, convJson):
"""Process group members and add to group_members_rows."""
MEMBER_KEYS = {
"membersV2": "Member",
"pendingMembersV2": "Pending Member",
"pendingAdminApprovalV2": "Pending Admin Approval",
"bannedMembersV2": "Banned Member",
}
for mbrKey, memberType in MEMBER_KEYS.items():
if mbrKey not in convJson:
continue
for member in convJson[mbrKey]:
mbrServiceId = member.get("aci", member.get("serviceId", None))
role = "Administrator" if member.get("role", None) == 2 else None
group_members_rows.append(
[
convId,
convJson.get("name", ""),
convJson.get("groupId", None),
memberType,
mbrServiceId,
service2name.get(mbrServiceId, None),
role,
]
)
def details_to_text(details):
"""Converts group change details to a readable string."""
if details is None or "type" not in details:
return None
dType = details["type"]
dRemoved = details.get("removed", None)
dNewPriv = details.get("newPrivilege", None)
newPrivSuffix = " to Admin only" if dNewPriv == 3 else " to All members"
def get_mbr_suffix():
mbrServiceId = details.get("aci", None)
mbrName = service2name.get(mbrServiceId, "")
return f"{mbrName} (Service ID: {mbrServiceId})"
if dType == "create":
return "Group created"
elif dType == "title":
return f"Group title changed to '{details.get('newTitle', '')}'"
elif dType == "description":
if not dRemoved:
return f"Group description changed to '{details.get('description', '')}'"
return "Group description removed"
elif dType == "group-link-add":
dPriv = details.get("privilege", None)
if dPriv != 1 and dPriv != 3:
return "Group link enabled"
# 1 = without admin approval, 3 = with admin approval
return f"Group link enabled {'without' if dPriv == 1 else 'with'} admin approval"
elif dType == "group-link-reset":
return "Group link reset"
elif dType == "group-link-remove":
return "Group link disabled"
elif dType == "access-invite-link":
# 3 = enabled, 1 = disabled
return f"Admin approval {'enabled' if dNewPriv == 3 else 'disabled'} for group join link"
elif dType == "access-members":
return f"Permission to add members changed{newPrivSuffix}"
elif dType == "access-attributes":
return f"Permission to modify group information changed{newPrivSuffix}"
elif dType == "announcements-only":
if details.get("announcementsOnly", False):
return "Group set to announcements only (only admins can send messages)"
return "Group set to allow all members to send messages"
elif dType == "avatar":
if dRemoved:
return "Group avatar removed"
return "Group avatar changed"
elif dType == "member-add":
return f"Member added: {get_mbr_suffix()}"
elif dType == "member-remove":
return f"Member removed: {get_mbr_suffix()}"
elif dType == "member-privilege":
return f"Member role updated to {'Admin' if dNewPriv == 2 else 'Member'} for {get_mbr_suffix()}"
return 'Uknown group change check "Details in JSON" for more information'
myServiceId = None
# Populate the service2name dictionary
for conv in conversations:
(convId, convJsonStr, convType, convActiveAt, serviceId, profileFullName, e164) = conv[:7]
convJson = json.loads(convJsonStr)
theName = convJson.get("name", "")
if convType == "private":
if theName == "":
# If there is no "contact name" in the conversation JSON, use the profileFullName or e164
theName = profileFullName if profileFullName is not None else e164
service2name[serviceId] = theName
conv2service[convId] = serviceId
if "avatars" in convJson:
myServiceId = serviceId
elif convType == "group":
groupId = convJson.get("groupId", None)
group2name[groupId] = theName
conv2group[convId] = groupId
convId2conv[convId] = {"name": theName, "type": convType}
# Process conversations table data
for conv in conversations:
(convId, convJsonStr, convType, convActiveAt, serviceId, profileFullName, e164) = conv[:7]
convJson = json.loads(convJsonStr)
avatarPathParts = [
str(avatar["path_pref"] / avatar["path"])
for avatar in handle_avatar(convJson, convType)
if "path" in avatar
]
if len(avatarPathParts) > 1:
avatarPathParts[0] += " (CURRENT)"
avatarPath = "\n".join(filter(None, avatarPathParts)) if len(avatarPathParts) > 0 else None
convLastMsg = process_last_message(convJson)
convDraftTimestamp = tts(convJson.get("draftTimestamp", None))
convDraft = print_mentions_in_message(convJson.get("draft", None), convJson.get("draftBodyRanges", None))
convDraftAttachments = convJson.get("draftAttachments", [])
convDraftAttachments = [entry["path"] for entry in convDraftAttachments if "path" in entry]
convDraftAttachmentsStr = (
"\n".join(filter(None, convDraftAttachments)) if len(convDraftAttachments) > 0 else None
)
if convType == "private":
cNote = convJson.get("note", None)
cNickname = convJson.get("nicknameGivenName", "") + " " + convJson.get("nicknameFamilyName", "")
cNickname = cNickname.strip()
contacts_rows.append(
[
convId,
serviceId,
convJson.get("name", ""),
e164,
convJson.get("username", ""),
profileFullName,
None if cNickname == "" else cNickname,
cNote,
]
)
elif convType == "group":
process_group_members(convId, convJson)
added_by = convJson.get("addedBy", None)
# Append the conversation data to the CSV rows
conv_rows.append(
[
convId,
convType,
convJson.get("name", ""),
tts(convActiveAt),
convJson.get("unreadCount", 0),
convJson.get("messageCount", 0),
convJson.get("sentMessageCount", 0),
tts(convJson.get("lastMessageTimestamp", None)),
convJson.get("lastMessageAuthor", None),
convLastMsg,
convJson.get("lastMessageDeletedForEveryone", None),
convDraftTimestamp,
convDraft,
convDraftAttachmentsStr,
convJson.get("expireTimer", None), # REVIEW: Keep in seconds?
convJson.get("isArchived", False),
avatarPath,
added_by,
service2name.get(added_by, None),
convJson.get("description", ""),
]
)
# Write the csv files
if not write_csv_file(reports_folder / "conversations.csv", CONVERSATIONS_HEADERS, conv_rows):
log("[!] Failed to write the conversations CSV file")
if not write_csv_file(reports_folder / "contacts.csv", CONTACTS_HEADERS, contacts_rows):
log("[!] Failed to write the contacts CSV file")
if not write_csv_file(reports_folder / "groups_members.csv", GROUPS_MEMBERS_HEADERS, group_members_rows):
log("[!] Failed to write the groups members CSV file")
# Free memory
conv_rows.clear()
contacts_rows.clear()
group_members_rows.clear()
del conv_rows
del contacts_rows
del group_members_rows
MESSAGES_HEADERS = [
"Message ID",
"Type",
"Conversation ID",
"Conversation Type",
"Conversation Name",
"Sent At",
"Received At",
"Author",
"Message",
"Has Attachments?",
"Is View Once?",
"Is Erased?",
"Expires At",
"Message Status",
"Has Reactions?",
"Quoted Message ID",
"Has Edit History?",
"Last Edit Received At",
"Author's Service ID",
"Author's Device",
]
MSGS_STATUSES_HEADERS = [
"Message ID",
"Target's Conversation ID",
"Target's Name",
"Message Status",
"Status Timestamp",
]
MSGS_VERSION_HISTS_HEADERS = ["Message ID", "Version Received At", "Body"]
MSGS_REACTIONS_HEADERS = ["Message ID", "Reactor's Conversation ID", "Reactor's Name", "Reaction", "Timestamp"]
MSGS_ATTACHMENTS_HEADERS = ["Message ID", "Type", "Path", "Content Type"]
GROUPS_CHANGES_HEADERS = [
"Message ID",
"Conversation ID",
"Group ID",
"Group Name",
"Timestamp",
"Author's Name",
"Type",
"Details",
"Details in JSON",
"Author's Service ID",
] # TODO: Details -> Something better
for msg_batch in fetch_batches_select(
cursor,
"SELECT id, type, conversationId, json, hasAttachments, hasFileAttachments, readStatus, seenStatus, sent_at, received_at_ms, expiresAt, body, isErased, isViewOnce, sourceServiceId, sourceDevice FROM messages WHERE type IN ('outgoing','incoming','group-v2-change','timer-notification', 'story')",
):
messages_rows = defaultdict(list)
msgs_statuses_rows = defaultdict(list)
msgs_version_hists_rows = defaultdict(list)
msgs_reactions_rows = defaultdict(list)
msgs_attachments_rows = defaultdict(list)
groups_changes_rows = defaultdict(list)
convIdKeys = []
for msg in msg_batch:
(
msgId,
msgType,
msgConvId,
msgJsonStr,
hasAttachments,
hasFileAttachments,
readStatus,
seenStatus,
sent_at,
received_at_ms,
msgExpiresAt,
body,
isErased,
isViewOnce,
sourceServiceId,
sourceDevice,
) = msg
convIdKey = msgConvId if not args.merge_conversations else None
if convIdKey not in convIdKeys:
convIdKeys.append(convIdKey)
try:
msgJson = json.loads(msgJsonStr)
msgConvType = convId2conv.get(msgConvId, {}).get("type", "")
msgConvName = convId2conv.get(msgConvId, {}).get("name", "")
msgAuthorServiceId = sourceServiceId
msgAuthor = service2name.get(msgAuthorServiceId, "")
if msgType in ("outgoing", "incoming", "timer-notification", "story"):
# Message body handling