Skip to content

Commit 2dbbad7

Browse files
committed
Merge branch 'cleaning' of https://github.com/maxme/dumpmon into maxme-cleaning
2 parents 287aea1 + 2e5dea3 commit 2dbbad7

12 files changed

+440
-363
lines changed

.gitignore

+7-2
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,14 @@
1-
#python specific
1+
# project
2+
settings.py
3+
output.log
4+
5+
# python specific
26
*.pyc
37

4-
## generic files to ignore
8+
# generic files to ignore
59
*~
610
*.lock
711
*.DS_Store
812
*.swp
913
*.out
14+

Readme.md

+1
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ For more overview, check out the blog post [here.](http://raidersec.blogspot.com
66

77
## Dependencies
88
[python-twitter](https://code.google.com/p/python-twitter/)
9+
$ pip install python-twitter
910
$ pip install beautifulsoup4
1011
$ pip install requests
1112

dumpmon.py

+27-23
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
# dumpmon.py
1+
# dumpmon.py
22
# Author: Jordan Wright
33
# Version: 0.0 (in dev)
44

@@ -18,36 +18,40 @@
1818
from settings import CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET
1919
import threading
2020

21+
2122
def monitor():
22-
'''
23-
monitor() - Main function... creates and starts threads
23+
'''
24+
monitor() - Main function... creates and starts threads
2425
25-
'''
26-
log('[*] Monitoring...')
27-
log('[*] Ctrl+C to quit')
28-
bot = twitter.Api(consumer_key=CONSUMER_KEY,
26+
'''
27+
log('[*] Monitoring...')
28+
log('[*] Ctrl+C to quit')
29+
bot = twitter.Api(consumer_key=CONSUMER_KEY,
2930
consumer_secret=CONSUMER_SECRET,
3031
access_token_key=ACCESS_TOKEN,
3132
access_token_secret=ACCESS_TOKEN_SECRET)
32-
# Create lock for both output log and tweet action
33-
log_lock = threading.Lock()
34-
tweet_lock = threading.Lock()
33+
# Create lock for both output log and tweet action
34+
log_lock = threading.Lock()
35+
tweet_lock = threading.Lock()
3536

36-
pastebin_thread = threading.Thread(target=Pastebin().monitor, args=[bot,log_lock, tweet_lock])
37-
slexy_thread = threading.Thread(target=Slexy().monitor, args=[bot,log_lock, tweet_lock])
38-
pastie_thead = threading.Thread(target=Pastie().monitor, args=[bot,log_lock, tweet_lock])
37+
pastebin_thread = threading.Thread(
38+
target=Pastebin().monitor, args=[bot, log_lock, tweet_lock])
39+
slexy_thread = threading.Thread(
40+
target=Slexy().monitor, args=[bot, log_lock, tweet_lock])
41+
pastie_thead = threading.Thread(
42+
target=Pastie().monitor, args=[bot, log_lock, tweet_lock])
3943

40-
for thread in (pastebin_thread, slexy_thread, pastie_thead):
41-
thread.daemon = True
42-
thread.start()
44+
for thread in (pastebin_thread, slexy_thread, pastie_thead):
45+
thread.daemon = True
46+
thread.start()
4347

44-
# Let threads run
45-
try:
46-
while(1):
47-
sleep(5)
48-
except KeyboardInterrupt:
49-
log('Stopped.')
48+
# Let threads run
49+
try:
50+
while(1):
51+
sleep(5)
52+
except KeyboardInterrupt:
53+
log('Stopped.')
5054

5155

5256
if __name__ == "__main__":
53-
monitor()
57+
monitor()

lib/Paste.py

+61-54
Original file line numberDiff line numberDiff line change
@@ -1,60 +1,67 @@
1-
from regexes import regexes
1+
from .regexes import regexes
22
import settings
33

4+
45
def log(text):
5-
'''
6-
log(text): Logs message to both STDOUT and to .output_log file
6+
'''
7+
log(text): Logs message to both STDOUT and to .output_log file
8+
9+
'''
10+
if text:
11+
print(text.encode('utf-8'))
12+
with open(settings.log_file, 'a') as logfile:
13+
logfile.write(text.encode('utf-8') + '\n')
714

8-
'''
9-
if text:
10-
print text.encode('utf-8')
11-
with open(settings.log_file, 'a') as logfile:
12-
logfile.write(text.encode('utf-8') + '\n')
1315

1416
class Paste(object):
15-
def __init__(self):
16-
'''
17-
class Paste: Generic "Paste" object to contain attributes of a standard paste
18-
19-
'''
20-
self.emails = 0
21-
self.hashes = 0
22-
self.num_emails = 0
23-
self.num_hashes = 0
24-
self.text = None
25-
self.type = None
26-
self.db_keywords = 0.0
27-
28-
def match(self):
29-
'''
30-
Matches the paste against a series of regular expressions to determine if the paste is 'interesting'
31-
32-
Sets the following attributes:
33-
self.emails
34-
self.hashes
35-
self.num_emails
36-
self.num_hashes
37-
self.db_keywords
38-
self.type
39-
40-
'''
41-
# Get the amount of emails
42-
self.emails = list(set(regexes['email'].findall(self.text)))
43-
self.hashes = regexes['hash32'].findall(self.text)
44-
self.num_emails = len(self.emails)
45-
self.num_hashes = len(self.hashes)
46-
for regex in regexes['db_keywords']:
47-
if regex.search(self.text):
48-
log('\t[+] ' + regex.search(self.text).group(1))
49-
self.db_keywords += round(1/float(len(regexes['db_keywords'])), 2)
50-
for regex in regexes['blacklist']:
51-
if regex.search(self.text):
52-
log('\t[-] ' + regex.search(self.text).group(1))
53-
self.db_keywords -= round(1.25 * (1/float(len(regexes['db_keywords']))), 2)
54-
if (self.num_emails >= settings.EMAIL_THRESHOLD) or (self.num_hashes >= settings.HASH_THRESHOLD) or (self.db_keywords >= settings.DB_KEYWORDS_THRESHOLD):
55-
self.type = 'db_dump'
56-
if regexes['cisco_hash'].search(self.text) or regexes['cisco_pass'].search(self.text): self.type = 'Cisco'
57-
if regexes['honeypot'].search(self.text): self.type = 'honeypot'
58-
if regexes['google_api'].search(self.text): self.type = 'google_api'
59-
#if regexes['juniper'].search(self.text): self.type = 'Juniper'
60-
return self.type
17+
def __init__(self):
18+
'''
19+
class Paste: Generic "Paste" object to contain attributes of a standard paste
20+
21+
'''
22+
self.emails = 0
23+
self.hashes = 0
24+
self.num_emails = 0
25+
self.num_hashes = 0
26+
self.text = None
27+
self.type = None
28+
self.db_keywords = 0.0
29+
30+
def match(self):
31+
'''
32+
Matches the paste against a series of regular expressions to determine if the paste is 'interesting'
33+
34+
Sets the following attributes:
35+
self.emails
36+
self.hashes
37+
self.num_emails
38+
self.num_hashes
39+
self.db_keywords
40+
self.type
41+
42+
'''
43+
# Get the amount of emails
44+
self.emails = list(set(regexes['email'].findall(self.text)))
45+
self.hashes = regexes['hash32'].findall(self.text)
46+
self.num_emails = len(self.emails)
47+
self.num_hashes = len(self.hashes)
48+
for regex in regexes['db_keywords']:
49+
if regex.search(self.text):
50+
log('\t[+] ' + regex.search(self.text).group(1))
51+
self.db_keywords += round(1/float(
52+
len(regexes['db_keywords'])), 2)
53+
for regex in regexes['blacklist']:
54+
if regex.search(self.text):
55+
log('\t[-] ' + regex.search(self.text).group(1))
56+
self.db_keywords -= round(1.25 * (
57+
1/float(len(regexes['db_keywords']))), 2)
58+
if (self.num_emails >= settings.EMAIL_THRESHOLD) or (self.num_hashes >= settings.HASH_THRESHOLD) or (self.db_keywords >= settings.DB_KEYWORDS_THRESHOLD):
59+
self.type = 'db_dump'
60+
if regexes['cisco_hash'].search(self.text) or regexes['cisco_pass'].search(self.text):
61+
self.type = 'Cisco'
62+
if regexes['honeypot'].search(self.text):
63+
self.type = 'honeypot'
64+
if regexes['google_api'].search(self.text):
65+
self.type = 'google_api'
66+
# if regexes['juniper'].search(self.text): self.type = 'Juniper'
67+
return self.type

lib/Pastebin.py

+61-54
Original file line numberDiff line numberDiff line change
@@ -1,62 +1,69 @@
1-
from Site import Site
2-
from Paste import Paste
1+
from .Site import Site
2+
from .Paste import Paste
33
from bs4 import BeautifulSoup
4-
import helper
4+
from . import helper
55
from time import sleep
66
from settings import SLEEP_PASTEBIN
77
from twitter import TwitterError
88

9+
910
class PastebinPaste(Paste):
10-
def __init__(self, id):
11-
self.id = id
12-
self.headers = None
13-
self.url = 'http://pastebin.com/raw.php?i=' + self.id
14-
super(PastebinPaste, self).__init__()
11+
def __init__(self, id):
12+
self.id = id
13+
self.headers = None
14+
self.url = 'http://pastebin.com/raw.php?i=' + self.id
15+
super(PastebinPaste, self).__init__()
16+
1517

1618
class Pastebin(Site):
17-
def __init__(self, last_id=None):
18-
if not last_id: last_id = None
19-
self.ref_id = last_id
20-
self.BASE_URL = 'http://pastebin.com'
21-
super(Pastebin, self).__init__()
22-
def update(self):
23-
'''update(self) - Fill Queue with new Pastebin IDs'''
24-
print '[*] Retrieving Pastebin ID\'s'
25-
results = BeautifulSoup(helper.download(self.BASE_URL + '/archive')).find_all(lambda tag: tag.name=='td' and tag.a and '/archive/' not in tag.a['href'] and tag.a['href'][1:])
26-
new_pastes = []
27-
if not self.ref_id: results = results[:60]
28-
for entry in results:
29-
paste = PastebinPaste(entry.a['href'][1:])
30-
# Check to see if we found our last checked URL
31-
if paste.id == self.ref_id:
32-
break
33-
new_pastes.append(paste)
34-
for entry in new_pastes[::-1]:
35-
print '[+] Adding URL: ' + entry.url
36-
self.put(entry)
37-
def monitor(self, bot, l_lock, t_lock):
38-
self.update()
39-
while(1):
40-
while not self.empty():
41-
paste = self.get()
42-
self.ref_id = paste.id
43-
with l_lock:
44-
helper.log('[*] Checking ' + paste.url)
45-
paste.text = helper.download(paste.url)
46-
with l_lock:
47-
tweet = helper.build_tweet(paste)
48-
if tweet:
49-
print tweet
50-
with t_lock:
51-
helper.record(tweet)
52-
try:
53-
bot.PostUpdate(tweet)
54-
except TwitterError:
55-
pass
56-
self.update()
57-
# If no new results... sleep for 5 sec
58-
while self.empty():
59-
with l_lock:
60-
helper.log('[*] No results... sleeping')
61-
sleep(SLEEP_PASTEBIN)
62-
self.update()
19+
def __init__(self, last_id=None):
20+
if not last_id:
21+
last_id = None
22+
self.ref_id = last_id
23+
self.BASE_URL = 'http://pastebin.com'
24+
super(Pastebin, self).__init__()
25+
26+
def update(self):
27+
'''update(self) - Fill Queue with new Pastebin IDs'''
28+
print('[*] Retrieving Pastebin ID\'s')
29+
results = BeautifulSoup(helper.download(self.BASE_URL + '/archive')).find_all(
30+
lambda tag: tag.name == 'td' and tag.a and '/archive/' not in tag.a['href'] and tag.a['href'][1:])
31+
new_pastes = []
32+
if not self.ref_id:
33+
results = results[:60]
34+
for entry in results:
35+
paste = PastebinPaste(entry.a['href'][1:])
36+
# Check to see if we found our last checked URL
37+
if paste.id == self.ref_id:
38+
break
39+
new_pastes.append(paste)
40+
for entry in new_pastes[::-1]:
41+
print('[+] Adding URL: ' + entry.url)
42+
self.put(entry)
43+
44+
def monitor(self, bot, l_lock, t_lock):
45+
self.update()
46+
while(1):
47+
while not self.empty():
48+
paste = self.get()
49+
self.ref_id = paste.id
50+
with l_lock:
51+
helper.log('[*] Checking ' + paste.url)
52+
paste.text = helper.download(paste.url)
53+
with l_lock:
54+
tweet = helper.build_tweet(paste)
55+
if tweet:
56+
print(tweet)
57+
with t_lock:
58+
helper.record(tweet)
59+
try:
60+
bot.PostUpdate(tweet)
61+
except TwitterError:
62+
pass
63+
self.update()
64+
# If no new results... sleep for 5 sec
65+
while self.empty():
66+
with l_lock:
67+
helper.log('[*] No results... sleeping')
68+
sleep(SLEEP_PASTEBIN)
69+
self.update()

0 commit comments

Comments (0)