demo.py
from custom_command import LinkCountingCommand
from openwpm.command_sequence import CommandSequence
from openwpm.commands.browser_commands import GetCommand
from openwpm.config import BrowserParams, ManagerParams
from openwpm.task_manager import TaskManager

NUM_BROWSERS = 1

# The list of sites that we wish to crawl
sites = [
    "http://www.example.com",
    "http://www.princeton.edu",
    "http://citp.princeton.edu/",
]
# Loads the default ManagerParams
# and NUM_BROWSERS copies of the default BrowserParams
manager_params = ManagerParams(
    num_browsers=NUM_BROWSERS
)  # num_browsers is necessary to let TaskManager know how many browsers to spawn
browser_params = [BrowserParams(display_mode="headless") for _ in range(NUM_BROWSERS)]
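# Note: "headless" runs Firefox without a visible window. Other display modes
# (e.g. "native" for a visible browser, or "xvfb" on Linux) can be substituted
# here, depending on what your OpenWPM version supports.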
# Update browser configuration (use this for per-browser settings)
for i in range(NUM_BROWSERS):
    # Record HTTP Requests and Responses
    browser_params[i].http_instrument = True
    # Record cookie changes
    browser_params[i].cookie_instrument = True
    # Record Navigations
    browser_params[i].navigation_instrument = True
    # Record JS Web API calls
    browser_params[i].js_instrument = True
    # Record the callstack of all WebRequests made
    browser_params[i].callstack_instrument = True
    # Record DNS resolution
    browser_params[i].dns_instrument = True
# Update TaskManager configuration (use this for crawl-wide settings)
manager_params.data_directory = "~/Desktop/"
manager_params.log_directory = "~/Desktop/"
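# With this configuration the crawl records are typically written to a SQLite
# database (crawl-data.sqlite) under data_directory and the crawler log under
# log_directory; exact filenames can differ between OpenWPM versions.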
# memory_watchdog and process_watchdog are useful for large scale cloud crawls.
# Please refer to docs/Configuration.md#platform-configuration-options for more information
# manager_params.memory_watchdog = True
# manager_params.process_watchdog = True
# Instantiates the measurement platform
# Commands time out by default after 60 seconds
manager = TaskManager(manager_params, browser_params)
# Visits the sites
for site in sites:
    # Parallelize the visits across the browsers configured above.
    command_sequence = CommandSequence(
        site,
        reset=True,
        callback=lambda success, val=site: print("CommandSequence {} done".format(val)),
    )
    # Start by visiting the page
    command_sequence.append_command(GetCommand(url=site, sleep=3), timeout=60)
    # Have a look at custom_command.py to see how to implement your own command
    command_sequence.append_command(LinkCountingCommand())
    # Run commands across all browsers (simple parallelization)
    manager.execute_command_sequence(command_sequence)
# Shuts down the browsers and waits for the data to finish logging
manager.close()
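
# A minimal sketch of inspecting the results after the crawl, assuming the
# data was written to crawl-data.sqlite in the data directory above and that
# the standard http_requests table is present (filenames and schema may
# differ between OpenWPM versions):
import sqlite3
from pathlib import Path

db_path = Path("~/Desktop/crawl-data.sqlite").expanduser()
with sqlite3.connect(db_path) as conn:
    # Print a small sample of the recorded HTTP requests
    for method, url in conn.execute(
        "SELECT method, url FROM http_requests LIMIT 10"
    ):
        print(method, url)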