Skip to content

Commit ecfcb79

Browse files
committed
more stuff
1 parent a5c77c0 commit ecfcb79

14 files changed

+351
-14
lines changed

README.md

+3
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ Feel free to open [issues](https://github.com/Te-k/analyst-scripts/issues) if yo
99
* `clamav_to_yara.py` : Convert ClamAV signature to Yara (from the [Malware Analyst's Cookbook](https://www.wiley.com/en-us/Malware+Analyst%27s+Cookbook+and+DVD%3A+Tools+and+Techniques+for+Fighting+Malicious+Code-p-9780470613030))
1010
* `cloudcidrs.py` : check if an IP is part of a Cloud provider range (for now, only Google Cloud and Amazon AWS, inspired from [cloudcidrs](https://cloudyr.github.io/cloudcidrs/))
1111
* `disassemble.py` : disassemble a binary file using [Capstone](http://www.capstone-engine.org/) (mostly for shellcode)
12+
* `csv_extract.py` : extract a column from a csv file
1213
* `hostnametoips.py` : resolve a list of hostnames in a text file and return a list of unique IPs
1314
* `infect.sh` : classic script to create an encrypted zip of a file with password infected (password used to share malware)
1415
* `mqtt-get.py` : basic script to do get requests to an [MQTT](https://fr.wikipedia.org/wiki/MQTT) service
@@ -41,6 +42,7 @@ Feel free to open [issues](https://github.com/Te-k/analyst-scripts/issues) if yo
4142
* `api.py` : API and CLI tool to query Google URL shortener goo.gl (soon deprecated by Google)
4243
* [harpoon-extra](harpoon-extra/) : some scripts expanding [Harpoon](https://github.com/Te-k/harpoon) features
4344
* [web](web/) : Web stuff (mostly outdated)
45+
* [macos](macos/) : Mac OSX related scripts
4446
* [misp](misp/) : some scripts helping using [MISP servers](https://www.misp-project.org/)
4547
* [network](network/) : network related scripts
4648
* [ooni](ooni/) : [OONI](https://ooni.torproject.org/) API scripts
@@ -53,3 +55,4 @@ Feel free to open [issues](https://github.com/Te-k/analyst-scripts/issues) if yo
5355
* [twilio](twilio/) : scripts related to [Twilio](https://www.twilio.com/)
5456
* [twitter](twitter/) : Twitter stuff
5557
* [visualization](visualization/) : nice graphs everywhere
58+
* [vt](vt/) : scripts related to Virus Total

csv_extract.py

+19
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
import csv
import argparse

if __name__ == "__main__":
    # CLI tool: print one column of a CSV file, one value per line.
    parser = argparse.ArgumentParser(description='Extract a column from a CSV file')
    parser.add_argument('COLUMN', type=int,
            help='0-based index of the column to extract')
    parser.add_argument('FILE', help='CSV file')
    parser.add_argument('--delimiter', '-d', default=',', help='Delimiter')
    parser.add_argument('--quotechar', '-q', default='"', help='Quote char')
    args = parser.parse_args()

    # newline='' is required by the csv module so that quoted fields
    # containing newlines are parsed correctly
    with open(args.FILE, newline='') as csvfile:
        reader = csv.reader(csvfile, delimiter=args.delimiter, quotechar=args.quotechar)

        for row in reader:
            # Skip ragged rows that are too short instead of crashing
            # with an IndexError half-way through the file
            if len(row) > args.COLUMN:
                print(row[args.COLUMN])
18+
19+

forensic/README.md

+2
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,6 @@ Two scripts here to help creating timeline on Linux live systems :
55
* `mactime.py` : convert this list of files into a csv timeline
66

77
Misc :
8+
* `extract_chrome_history.py`: extract history from a Chrome History SQLite file
89
* `ios_unpack.py` : unpack iOS backup folder from iTunes or [libimobiledevice](https://www.libimobiledevice.org/)
10+

forensic/extract_chrome_history.py

+47
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,47 @@
1+
import argparse
2+
import sqlite3
3+
import csv
4+
from datetime import datetime
5+
6+
"""
7+
Schema
8+
CREATE TABLE urls(id INTEGER PRIMARY KEY AUTOINCREMENT,url LONGVARCHAR,title LONGVARCHAR,visit_count INTEGER DEFAULT 0 NOT NULL,typed_count INTEGER DEFAULT 0 NOT NULL,last_visit_time INTEGER NOT NULL,hidden INTEGER DEFAULT 0 NOT NULL);
9+
CREATE TABLE visits(id INTEGER PRIMARY KEY,url INTEGER NOT NULL,visit_time INTEGER NOT NULL,from_visit INTEGER,transition INTEGER DEFAULT 0 NOT NULL,segment_id INTEGER,visit_duration INTEGER DEFAULT 0 NOT NULL,incremented_omnibox_typed_score BOOLEAN DEFAULT FALSE NOT NULL);
10+
"""
11+
12+
def convert_timestamp(tmstp):
    """Convert a WebKit/Chrome timestamp to a local :class:`datetime`.

    Chrome stores times as microseconds since 1601-01-01 (the Windows
    epoch); 11644473600 seconds separate that epoch from the Unix epoch.
    Integer divmod is used instead of float division because Chrome-era
    values (~1.3e16 µs) exceed float precision at the microsecond level.
    """
    seconds, micros = divmod(int(tmstp), 1000000)
    # fromtimestamp() on an int yields microsecond == 0, so replace() is safe
    return datetime.fromtimestamp(seconds - 11644473600).replace(microsecond=micros)
14+
15+
16+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Extract history from a Chrome History SQLite file')
    parser.add_argument('FILE', help='History file')
    parser.add_argument('--filter', '-f', help='Filter on the url')
    args = parser.parse_args()

    # visit_source only has rows for non-local visits, hence the LEFT JOIN
    query = ("SELECT urls.id, urls.url, urls.title, urls.visit_count, "
             "urls.typed_count, urls.last_visit_time, urls.hidden, "
             "visits.visit_time, visits.from_visit, visits.visit_duration, "
             "visits.transition, visit_source.source "
             "FROM urls JOIN visits ON urls.id = visits.url "
             "LEFT JOIN visit_source ON visits.id = visit_source.id")
    params = []
    if args.filter:
        # Bind the filter as a query parameter rather than formatting it
        # into the SQL string: a filter containing quotes would otherwise
        # break the query (and it is an SQL injection in general)
        query += " WHERE urls.url LIKE ?"
        params.append("%{}%".format(args.filter))
    query += " ORDER BY visits.visit_time;"

    conn = sqlite3.connect(args.FILE)
    c = conn.cursor()

    print("url_id,url,title,#visits,typed_count,last_visit_time,hidden,visit_time,from_visit,visit_duration,transition,source")
    for row in c.execute(query, params):
        print("{},{},\"{}\",{},{},{},{},{},{},{},{},{}".format(
            row[0],
            row[1],
            row[2],
            row[3],
            row[4],
            convert_timestamp(row[5]).strftime("%Y-%m-%d %H:%M:%S:%f"),
            row[6],
            convert_timestamp(row[7]).strftime("%Y-%m-%d %H:%M:%S:%f"),
            row[8],
            row[9],
            row[10],
            row[11]
        ))
    conn.close()

ghidra_scripts/yara-crypto.yar

+26-14
Original file line numberDiff line numberDiff line change
@@ -243,7 +243,7 @@ rule BLOWFISH_Constants {
243243
version = "0.1"
244244
strings:
245245
$c0 = { D1310BA6 }
246-
$c1 = { A60B31D1 }
246+
$c1 = { A60B31D1 }
247247
$c2 = { 98DFB5AC }
248248
$c3 = { ACB5DF98 }
249249
$c4 = { 2FFD72DB }
@@ -421,7 +421,7 @@ rule DarkEYEv3_Cryptor {
421421
hash8 = "f3d5b71b7aeeb6cc917d5bb67e2165cf8a2fbe61"
422422
score = 55
423423
strings:
424-
$s0 = "\\DarkEYEV3-"
424+
$s0 = "\\DarkEYEV3-"
425425
condition:
426426
uint16(0) == 0x5a4d and $s0
427427
}
@@ -833,7 +833,7 @@ rule OpenSSL_DSA
833833
meta:
834834
author="_pusher_"
835835
date="2016-08"
836-
strings:
836+
strings:
837837
$a0 = "bignum_data" wide ascii nocase
838838
$a1 = "DSA_METHOD" wide ascii nocase
839839
$a2 = "PDSA" wide ascii nocase
@@ -1048,16 +1048,28 @@ rule RijnDael_AES_CHAR_inv
10481048
condition:
10491049
$c0
10501050
}
rule RijnDael_AES_CHAR_inv2 {
    meta:
        author = "Etienne Maynier"
        description = "Rijndael AES S-inv"
        ref = "https://en.wikipedia.org/wiki/Rijndael_S-box"

    strings:
        // First 32 bytes of the Rijndael inverse S-box
        $c0 = { 52 09 6a d5 30 36 a5 38 bf 40 a3 9e 81 f3 d7 fb 7c e3 39 82 9b 2f ff 87 34 8e 43 44 c4 de e9 cb }

    condition:
        $c0
}

rule RijnDael_AES_Key_Schedule {
    meta:
        author = "Etienne Maynier"
        ref = "https://en.wikipedia.org/wiki/AES_key_schedule"

    strings:
        // AES key-schedule round constants (rcon)
        $c0 = { 01 02 04 08 10 20 40 80 1B 36 }

    condition:
        $c0
}
10621074

10631075
rule RsaRef2_NN_modExp
@@ -1189,15 +1201,15 @@ rule x509_public_key_infrastructure_cert
11891201
ext = "crt"
11901202
strings:
11911203
$c0 = { 30 82 ?? ?? 30 82 ?? ?? }
1192-
condition:
1204+
condition:
11931205
$c0
11941206
}
11951207
11961208
rule pkcs8_private_key_information_syntax_standard
11971209
{ meta:
11981210
desc = "Found PKCS #8: Private-Key"
11991211
ext = "key"
1200-
strings:
1212+
strings:
12011213
$c0 = { 30 82 ?? ?? 02 01 00 }
12021214
condition:
12031215
$c0

iocs/extract_hashes.py

+22
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
import os
2+
import re
3+
import argparse
4+
5+
6+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Extract SHA256 hashes from a file')
    parser.add_argument('FILE', help="File to search for hashes")
    args = parser.parse_args()

    with open(args.FILE) as f:
        data = f.read().split("\n")

    hashes = set()

    # Lookarounds stop a 64-hex-char slice of a longer hex string (e.g. a
    # SHA512) from being reported as a SHA256; compiled once outside the loop.
    pattern = re.compile(r"(?<![0-9a-fA-F])[0-9a-fA-F]{64}(?![0-9a-fA-F])")
    for d in data:
        # findall instead of search: a line may contain several hashes
        for match in pattern.findall(d):
            hashes.add(match)

    for h in hashes:
        print(h)

macos/README.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
# Mac OS stuff
2+
3+
* `macho_print_lief.py` : print raw data from LIEF
4+
* `macho_print.py` : print information on a Mach-O file
5+
* `macho_rename_section.py` : rename a Macho section

macos/check_kext_kk.py

+21
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
import json
2+
import argparse
3+
4+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Analyse kext and KnockKnock files')
    parser.add_argument('JSONFILE', help='JSON File saved by kext or knock knock')
    args = parser.parse_args()

    with open(args.JSONFILE) as f:
        data = json.load(f)

    # The report maps a category name to a list of item dicts
    for category, entries in data.items():
        print("Checking {}".format(category))
        for entry in entries:
            if "VT detection" in entry:
                # A ratio not starting with "0/" means at least one AV
                # engine flagged the item on VirusTotal
                if not entry["VT detection"].startswith("0/"):
                    print("Suspicious detection in VT:")
                    print(json.dumps(entry, indent=4))
            else:
                # No VT result at all: the item is unknown to VirusTotal
                # and worth a manual look (the previous message wrongly
                # claimed a VT detection here)
                print("No VT detection result:")
                print(json.dumps(entry, indent=4))

macos/extract_kext_kk.py

+20
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
import json
2+
import argparse
3+
4+
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Extract SHA1 kext and KnockKnock files')
    parser.add_argument('JSONFILE', help='JSON File saved by kext or knock knock')
    args = parser.parse_args()

    # Load the JSON report produced by kext or KnockKnock
    with open(args.JSONFILE) as f:
        data = json.loads(f.read())

    # Collect the unique SHA1 of every item that carries a hashes entry
    hashes = {
        item['hashes']['sha1']
        for items in data.values()
        for item in items
        if 'hashes' in item and 'sha1' in item['hashes']
    }

    for digest in hashes:
        print(digest)

macos/macho_print.py

+87
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
import lief
2+
import argparse
3+
import hashlib
4+
5+
6+
if __name__ == '__main__':
7+
parser = argparse.ArgumentParser(description='Print Mach-O information')
8+
parser.add_argument('MACHO', help='Mach-o file')
9+
args = parser.parse_args()
10+
11+
12+
binary = lief.parse(args.MACHO)
13+
14+
with open(args.MACHO, 'rb') as f:
15+
data = f.read()
16+
17+
# General information -> CPU Type
18+
# Hash, CPU Type, Size
19+
print("General Information")
20+
print("=" * 80)
21+
for algo in ["md5", "sha1", "sha256"]:
22+
m = getattr(hashlib, algo)()
23+
m.update(data)
24+
print("{:15} {}".format(algo.upper()+":", m.hexdigest()))
25+
print("{:15} {} bytes".format("Size:", len(data)))
26+
print("{:15} {}".format("Type:", binary.header.cpu_type.name))
27+
print("Entry point:\t0x%x" % binary.entrypoint)
28+
print("")
29+
30+
# Commands
31+
print("Commands")
32+
print("=" * 80)
33+
for c in binary.commands:
34+
if c.command.name == "SEGMENT_64":
35+
print("{:20} {:10} {:5} {:14} {}".format(
36+
c.command.name,
37+
c.name if hasattr(c, 'name') else '',
38+
c.size,
39+
hex(c.virtual_address) if hasattr(c, 'virtual_address') else "",
40+
hex(c.file_offset) if hasattr(c, 'file_offset') else "",
41+
))
42+
elif c.command.name in ["LOAD_DYLIB", "LOAD_WEAK_DYLIB"]:
43+
print("{:20} {} (version {})".format(
44+
c.command.name,
45+
c.name,
46+
".".join([str(a) for a in c.current_version])
47+
))
48+
elif c.command.name == "UUID":
49+
print("{:20} {}".format(
50+
c.command.name,
51+
''.join('{:02x}'.format(x) for x in c.uuid)
52+
))
53+
else:
54+
print("{:20} {:20}".format(
55+
c.command.name,
56+
c.name if hasattr(c, 'name') else ''
57+
))
58+
print("")
59+
60+
# Sections
61+
print("Sections")
62+
print("=" * 80)
63+
print("%-16s %-9s %-12s %-9s %-9s %-25s %s" % ( "Name", "Segname", "VirtAddr", "RawAddr", "Size", "type", "Md5"))
64+
for s in binary.sections:
65+
m = hashlib.md5()
66+
m.update(bytearray(s.content))
67+
print("%-16s %-9s %-12s %-9s %-9s %-25s %s" % (
68+
s.name,
69+
s.segment.name,
70+
hex(s.virtual_address),
71+
hex(s.offset),
72+
s.size,
73+
str(s.type).replace("SECTION_TYPES.", ""),
74+
m.hexdigest()
75+
))
76+
print("")
77+
78+
# Imports (binding infos)
79+
print("Imports")
80+
print("=" * 80)
81+
for f in binary.imported_symbols:
82+
try:
83+
print("{:35s} {}".format(f.name, f.binding_info.library.name))
84+
except lief.not_found:
85+
print(f.name)
86+
87+

macos/macho_print_lief.py

+13
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
import lief
2+
import argparse
3+
import hashlib
4+
5+
6+
if __name__ == '__main__':
    # Minimal tool: let LIEF parse the file and dump its own text
    # representation of the whole binary
    parser = argparse.ArgumentParser(description='Print Mach-O information')
    parser.add_argument('MACHO', help='Mach-o file')
    args = parser.parse_args()

    print(lief.parse(args.MACHO))

macos/macho_rename_section.py

+26
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
import lief
2+
import argparse
3+
import os
4+
5+
6+
if __name__ == "__main__":
    # The old description referred to a hard-coded __cfstring -> __text
    # rename; the script actually takes both names as arguments.
    parser = argparse.ArgumentParser(description='Rename a section of a Mach-O binary')
    parser.add_argument('MACHO', help='Mach-O binary')
    parser.add_argument('NAME', help='Name of the section to rename')
    parser.add_argument('NEWNAME', help='New name of the section')
    args = parser.parse_args()

    binary = lief.parse(args.MACHO)
    found = False
    for s in binary.sections:
        if s.name == args.NAME:
            # Rename only the first matching section
            s.name = args.NEWNAME
            print("Section found")
            found = True
            break

    if not found:
        print("This section was not found in this binary")
    else:
        # Write the patched binary next to the original file
        binary.write(args.MACHO + "_renamed")

vt/README.md

+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Virus Total
2+
3+
* `check_hashes.py` : check for a list of hashes on VT

0 commit comments

Comments
 (0)