Skip to content

Commit c5769cf

Browse files
committed
Implemented Multithreading
Bot now posts to Twitter (!). Cleaned up imports. Worked on regexes.
1 parent ffa6e1b commit c5769cf

8 files changed

+30
-25
lines changed

dumpmon.py

+6-5
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
from lib.regexes import regexes
1212
from lib.Pastebin import Pastebin, PastebinPaste
1313
from lib.Slexy import Slexy, SlexyPaste
14+
from lib.Pastie import Pastie, PastiePaste
1415
from lib.helper import log
1516
from time import sleep
1617
import twitter
@@ -33,12 +34,12 @@ def monitor():
3334
tweet_lock = threading.Lock()
3435

3536
pastebin_thread = threading.Thread(target=Pastebin().monitor, args=[bot,log_lock, tweet_lock])
36-
pastebin_thread.daemon = True
37-
pastebin_thread.start()
38-
3937
slexy_thread = threading.Thread(target=Slexy().monitor, args=[bot,log_lock, tweet_lock])
40-
slexy_thread.daemon = True
41-
slexy_thread.start()
38+
pastie_thead = threading.Thread(target=Pastie().monitor, args=[bot,log_lock, tweet_lock])
39+
40+
for thread in (pastebin_thread, slexy_thread, pastie_thead):
41+
thread.daemon = True
42+
thread.start()
4243

4344
# Let threads run
4445
try:

lib/Paste.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -45,15 +45,16 @@ def match(self):
4545
self.num_hashes = len(self.hashes)
4646
for regex in regexes['db_keywords']:
4747
if regex.search(self.text):
48-
log(regex.search(self.text).group(1))
48+
log('\t[+] ' + regex.search(self.text).group(1))
4949
self.db_keywords += round(1/float(len(regexes['db_keywords'])), 2)
5050
for regex in regexes['blacklist']:
5151
if regex.search(self.text):
52-
log(regex.search(self.text).group(1))
52+
log('\t[-] ' + regex.search(self.text).group(1))
5353
self.db_keywords -= round(1.25 * (1/float(len(regexes['db_keywords']))), 2)
5454
if (self.num_emails >= settings.EMAIL_THRESHOLD) or (self.num_hashes >= settings.HASH_THRESHOLD) or (self.db_keywords >= settings.DB_KEYWORDS_THRESHOLD):
5555
self.type = 'db_dump'
5656
if regexes['cisco_hash'].search(self.text) or regexes['cisco_pass'].search(self.text): self.type = 'Cisco'
57+
if regexes['honeypot'].search(self.text): self.type = 'honeypot'
5758
if regexes['google_api'].search(self.text): self.type = 'google_api'
5859
#if regexes['juniper'].search(self.text): self.type = 'Juniper'
5960
return self.type

lib/Pastebin.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -40,19 +40,19 @@ def monitor(self, bot, l_lock, t_lock):
4040
paste = self.get()
4141
self.ref_id = paste.id
4242
with l_lock:
43-
helper.log('Checking ' + paste.url)
43+
helper.log('[*] Checking ' + paste.url)
4444
paste.text = helper.download(paste.url)
4545
with l_lock:
4646
tweet = helper.build_tweet(paste)
4747
if tweet:
4848
print tweet
4949
with t_lock:
5050
helper.record(tweet)
51-
#bot.PostUpdate(paste.url, tweet)
51+
bot.PostUpdate(tweet)
5252
self.update()
5353
# If no new results... sleep for 5 sec
5454
while self.empty():
5555
with l_lock:
56-
helper.log('No results... sleeping')
56+
helper.log('[*] No results... sleeping')
5757
sleep(SLEEP_PASTEBIN)
5858
self.update()

lib/Pastie.py

+6-6
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from bs4 import BeautifulSoup
44
import helper
55
from time import sleep
6-
from settings import SLEEP_SLEXY
6+
from settings import SLEEP_PASTIE
77

88
class PastiePaste(Paste):
99
def __init__(self, id):
@@ -19,8 +19,8 @@ def __init__(self, last_id=None):
1919
self.BASE_URL = 'http://pastie.org'
2020
super(Pastie, self).__init__()
2121
def update(self):
22-
'''update(self) - Fill Queue with new Slexy IDs'''
23-
print '[*] Retrieving Slexy ID\'s'
22+
'''update(self) - Fill Queue with new Pastie IDs'''
23+
print '[*] Retrieving Pastie ID\'s'
2424
results = [tag for tag in BeautifulSoup(helper.download(self.BASE_URL + '/pastes')).find_all('p','link') if tag.a]
2525
new_pastes = []
2626
if not self.ref_id: results = results[:60]
@@ -40,7 +40,7 @@ def monitor(self, bot, l_lock, t_lock):
4040
paste = self.get()
4141
self.ref_id = paste.id
4242
with l_lock:
43-
helper.log('Checking ' + paste.url)
43+
helper.log('[*] Checking ' + paste.url)
4444
# goober pastie - Not actually showing *raw* text.. Still need to parse it out
4545
paste.text = BeautifulSoup(helper.download(paste.url)).pre.text
4646
with l_lock:
@@ -49,11 +49,11 @@ def monitor(self, bot, l_lock, t_lock):
4949
print tweet
5050
with t_lock:
5151
helper.record(tweet)
52-
#bot.PostUpdate(paste.url, tweet)
52+
bot.PostUpdate(tweet)
5353
self.update()
5454
# If no new results... sleep for 5 sec
5555
while self.empty():
5656
with l_lock:
57-
helper.log('No results... sleeping')
57+
helper.log('[*] No results... sleeping')
5858
sleep(SLEEP_PASTIE)
5959
self.update()

lib/Slexy.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -40,19 +40,19 @@ def monitor(self, bot, l_lock, t_lock):
4040
paste = self.get()
4141
self.ref_id = paste.id
4242
with l_lock:
43-
helper.log('Checking ' + paste.url)
43+
helper.log('[*] Checking ' + paste.url)
4444
paste.text = helper.download(paste.url)
4545
with l_lock:
4646
tweet = helper.build_tweet(paste)
4747
if tweet:
4848
print tweet
4949
with t_lock:
5050
helper.record(tweet)
51-
#bot.PostUpdate(paste.url, tweet)
51+
bot.PostUpdate(tweet)
5252
self.update()
5353
# If no new results... sleep for 5 sec
5454
while self.empty():
5555
with l_lock:
56-
helper.log('No results... sleeping')
56+
helper.log('[*] No results... sleeping')
5757
sleep(SLEEP_SLEXY)
5858
self.update()

lib/helper.py

+4-2
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66

77
import requests
88
import settings
9-
from time import sleep
9+
from time import sleep, strftime
1010

1111

1212
r = requests.Session()
@@ -30,7 +30,7 @@ def record(text):
3030
3131
'''
3232
with open(settings.tweet_history, 'a') as history:
33-
history.write(text + '\n')
33+
history.write(strftime('[%b %d, %Y %I:%M:%S]') + text + '\n')
3434

3535
def log(text):
3636
'''
@@ -62,6 +62,8 @@ def build_tweet(paste):
6262
tweet += ' Possible ' + paste.type + ' configuration'
6363
elif paste.type == 'ssh_private':
6464
tweet += ' Possible SSH private key'
65+
elif paste.type == 'honeypot':
66+
tweet += ' Dionaea Honeypot Log'
6567
if paste.num_emails > 0:
6668
print paste.emails
6769
return tweet

lib/regexes.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -3,12 +3,13 @@
33
regexes = {
44
'email' : re.compile(r'[A-Z0-9._%+-]+@[A-Z0-9.-]+\.[A-Z]{2,4}', re.I),
55
#'ssn' : re.compile(r'\d{3}-?\d{2}-?\d{4}'),
6-
'hash32' : re.compile(r'[^A-F\d]([A-F\d]{32})[^A-F\d]', re.I),
7-
'FFF' : re.compile(r'(?<!color:\s)(?:#FFF)|FBI\s*Friday', re.I), # will need to work on this to not match CSS
6+
'hash32' : re.compile(r'[^A-F\d/]([A-F\d]{32})[^A-F\d]', re.I),
7+
'FFF' : re.compile(r'FBI\s*Friday', re.I), # will need to work on this to not match CSS
88
'lulz' : re.compile(r'(lulzsec|antisec)', re.I),
99
'cisco_hash' : re.compile(r'enable\s+secret', re.I),
1010
'cisco_pass' : re.compile(r'enable\s+password', re.I),
1111
'google_api' : re.compile(r'(AIza.{35})'),
12+
'honeypot' : re.compile(r'<dionaea\.capture>', re.I),
1213
'db_keywords' : [
1314
re.compile(r'((customers?|email|users?|members?|acc(?:oun)?ts?)([-_|/\s]?(address|name|id[^")a-zA-Z0-9_]|[-_:|/\\])))', re.I),
1415
re.compile(r'((\W?pass(wor)?d|hash)[\s|:])', re.I),

settings.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,11 @@
99
# Thresholds
1010
EMAIL_THRESHOLD = 20
1111
HASH_THRESHOLD = 30
12-
DB_KEYWORDS_THRESHOLD = .65
12+
DB_KEYWORDS_THRESHOLD = .55
1313

1414
# Time to Sleep for each site
1515
SLEEP_SLEXY = 60
16-
SLEEP_PASTEBIN = 10
16+
SLEEP_PASTEBIN = 15
1717
SLEEP_PASTIE = 30
1818

1919
# Other configuration

0 commit comments

Comments
 (0)