# crawler.py (forked from openwpm/OpenWPM)
from automation import TaskManager, CommandSequence
from seeds import alexa500, bottom500
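# alexa500 and bottom500 are local seed-list modules; each is expected to expose
# a `domains` attribute with the list of URLs to crawl (see the `sites = ...`
# assignments below).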
# The list of sites that we wish to crawl
NUM_BROWSERS = 4
# sites = alexa500.domains
# sites = ['http://9gag.com/']
# sites = ['http://279cd9d8.ngrok.io']
sites = bottom500.domains
# Load the manager preferences and NUM_BROWSERS copies of the default browser dictionaries
manager_params, browser_params = TaskManager.load_default_params(NUM_BROWSERS)
# Update browser configuration (use this for per-browser settings)
for i in xrange(NUM_BROWSERS):
    browser_params[i]['headless'] = True  # Run every browser headless
    browser_params[i]['js_instrument'] = True
    browser_params[i]['save_javascript'] = False
    browser_params[i]['cookie_instrument'] = False
    browser_params[i]['cp_instrument'] = False
    browser_params[i]['http_instrument'] = True
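# Reading of the flags above (based on OpenWPM's instrumentation options; verify
# against the OpenWPM version in use):
#   js_instrument=True    -> record JavaScript API calls made by visited pages
#   http_instrument=True  -> record HTTP request/response data
#   save_javascript=False -> do not store JavaScript source files
#   cookie_instrument=False / cp_instrument=False -> skip cookie and content-policy logging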
# Update TaskManager configuration (use this for crawl-wide settings)
manager_params['data_directory'] = '~/study/thesis/OpenWPM/bresults'
manager_params['log_directory'] = '~/study/thesis/OpenWPM/bresults'
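# Assumption: the crawl database and log files are written under the directories
# above; their exact filenames come from the defaults returned by
# TaskManager.load_default_params, so check those if the output is not found.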
# Instantiates the measurement platform
# Commands time out by default after 60 seconds
manager = TaskManager.TaskManager(manager_params, browser_params)
# Visit each site, dispatching it to the first available browser
for site in sites:
    command_sequence = CommandSequence.CommandSequence(site)
    # Start by visiting the page
    command_sequence.get(sleep=2, timeout=600)
    # dump_profile_cookies/dump_flash_cookies closes the current tab.
    command_sequence.dump_profile_cookies(120)
    manager.execute_command_sequence(command_sequence, index=None)  # index=None -> first available browser
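# Note on the index argument (as used in the stock OpenWPM demo this script is
# based on; confirm against the local TaskManager): an integer targets one
# specific browser, None dispatches to the first free browser, and '**' runs
# the sequence on all browsers in sync.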
# Shuts down the browsers and waits for the data to finish logging
manager.close()
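# Second crawl: repeat the same measurement over the Alexa top-500 seed list,
# writing its results to a separate output directory (tresults).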
# The list of sites that we wish to crawl
NUM_BROWSERS = 4
sites = alexa500.domains
# sites = ['http://9gag.com/']
# sites = ['http://279cd9d8.ngrok.io']
# sites = bottom500.domains
# Load the manager preferences and NUM_BROWSERS copies of the default browser dictionaries
manager_params, browser_params = TaskManager.load_default_params(NUM_BROWSERS)
# Update browser configuration (use this for per-browser settings)
for i in xrange(NUM_BROWSERS):
    browser_params[i]['headless'] = True  # Run every browser headless
    browser_params[i]['js_instrument'] = True
    browser_params[i]['save_javascript'] = False
    browser_params[i]['cookie_instrument'] = False
    browser_params[i]['cp_instrument'] = False
    browser_params[i]['http_instrument'] = True
# Update TaskManager configuration (use this for crawl-wide settings)
manager_params['data_directory'] = '~/study/thesis/OpenWPM/tresults'
manager_params['log_directory'] = '~/study/thesis/OpenWPM/tresults'
# Instantiates the measurement platform
# Commands time out by default after 60 seconds
manager = TaskManager.TaskManager(manager_params, browser_params)
# Visit each site, dispatching it to the first available browser
for site in sites:
    command_sequence = CommandSequence.CommandSequence(site)
    # Start by visiting the page
    command_sequence.get(sleep=2, timeout=600)
    # dump_profile_cookies/dump_flash_cookies closes the current tab.
    command_sequence.dump_profile_cookies(120)
    manager.execute_command_sequence(command_sequence, index=None)  # index=None -> first available browser
# Shuts down the browsers and waits for the data to finish logging
manager.close()