#!/usr/bin/env python
# vim: set sw=4 et:

import argparse
import os
import sys
import logging
import umbra
import threading
import time
import sortedcontainers
import surt
import signal

arg_parser = argparse.ArgumentParser(prog=os.path.basename(__file__),
        description='crawl-url - browse urls, follow links',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
arg_parser.add_argument('urls', metavar='URL', nargs='+', help='URL(s) to browse')
arg_parser.add_argument('-w', '--browser-wait', dest='browser_wait', default='60',
        help='seconds to wait for browser initialization')
arg_parser.add_argument('-e', '--executable', dest='chrome_exe', default='chromium-browser',
        help='executable to use to invoke chrome')
arg_parser.add_argument('-n', '--max-browsers', dest='max_browsers', default='1',
        help='max number of chrome instances simultaneously browsing pages')
arg_parser.add_argument('-v', '--verbose', dest='log_level',
        action="store_const", default=logging.INFO, const=logging.DEBUG)
arg_parser.add_argument('--version', action='version',
        version="umbra {} - {}".format(umbra.version, os.path.basename(__file__)))
args = arg_parser.parse_args(args=sys.argv[1:])
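
# example invocation (assuming this script is saved as ./crawl-url and
# chromium-browser is on $PATH):
#   ./crawl-url -v http://example.com/ http://example.org/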

logging.basicConfig(stream=sys.stdout, level=args.log_level,
        format='%(asctime)s %(process)d %(levelname)s %(threadName)s %(name)s.%(funcName)s(%(filename)s:%(lineno)d) %(message)s')

class CrawlUrl:
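    """A url to crawl plus scheduling metadata: priority defaults to a score
    favoring urls few hops from the seed with shallow paths, and is packed
    with a hash of the url's surt into a single sortable priority_key."""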
    def __init__(self, url, priority=None, hops_from_seed=0):
        self.url = url
        self.hops_from_seed = hops_from_seed
        self._surt = None
        # "is not None" so that an explicit priority of 0 is honored rather
        # than recalculated
        if priority is not None:
            self.set_priority(priority)
        else:
            self.set_priority(self.calc_priority())

    def set_priority(self, priority):
        # priority_key is both a sortable priority (higher value is higher
        # priority) and a unique hash key
        self.priority_key = (priority << 32) | (hash(self.surt) & (2**32 - 1))
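        # e.g. priority 10 with 32-bit surt hash 0xdeadbeef packs to
        # 0xadeadbeef: keys sort by priority first, while the hash suffix
        # keeps keys for distinct urls distinct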

    def calc_priority(self):
        priority = 0
        priority += max(0, 10 - self.hops_from_seed)
        priority += max(0, 6 - self.surt.count('/'))
        return priority

    def __repr__(self):
        return """CrawlUrl(url="{}",priority={},hops_from_seed={})""".format(
                self.url, self.priority, self.hops_from_seed)

    @property
    def surt(self):
        if self._surt is None:
            self._surt = surt.surt(self.url, canonicalizer=surt.GoogleURLCanonicalizer, trailing_comma=True)
        return self._surt

    @property
    def priority(self):
        return self.priority_key >> 32

class CrawlUrlQueue:
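    """Priority queue of CrawlUrls: a SortedDict keyed on priority_key keeps
    urls in priority order, while a dict keyed on surt lets a rescheduled url
    be found and merged with its queued duplicate."""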
    def __init__(self):
        # {priority_key:CrawlUrl}
        self._pq = sortedcontainers.SortedDict()
        # {surt:CrawlUrl}
        self._urls = {}
        self.aggregate_priority = 0

    def __len__(self):
        assert len(self._urls) == len(self._pq)
        return len(self._urls)

    def schedule(self, crawl_url):
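        # if this url (by surt) is already scheduled, fold the old entry into
        # the new one: the priorities add up, and hops_from_seed keeps the
        # smaller value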
        self.aggregate_priority += crawl_url.priority

        try:
            old_priority_key = self._urls.pop(crawl_url.surt).priority_key
            old_crawl_url = self._pq.pop(old_priority_key)

            # XXX dumb calculation of new priority, may not belong here
            crawl_url.set_priority(crawl_url.priority + old_crawl_url.priority)
            crawl_url.hops_from_seed = min(old_crawl_url.hops_from_seed, crawl_url.hops_from_seed)
        except KeyError:
            pass

        self._urls[crawl_url.surt] = crawl_url
        self._pq[crawl_url.priority_key] = crawl_url

    def next_url(self):
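        # pop the highest-priority url, then immediately reschedule it at a
        # steeply negative priority so it is not revisited until everything
        # else has had a turn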
        # SortedDict.popitem() pops the item with the greatest key by default
        res0 = self._pq.popitem()[1]
        res1 = self._urls.pop(res0.surt)
        assert res0 is res1

        new_low_priority = CrawlUrl(res0.url, priority=-1000, hops_from_seed=res0.hops_from_seed)
        self.schedule(new_low_priority)

        return res0

class Site:
    """A seed url, scope definition, and prioritized url queue."""
    def __init__(self, seed_url):
        self.seed = CrawlUrl(seed_url, priority=1000)

        self.q = CrawlUrlQueue()
        self.q.schedule(self.seed)

    def is_in_scope(self, url):
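        # scope is a surt prefix match: e.g. (roughly, under this surt
        # configuration) seed http://example.com/foo/ canonicalizes to
        # com,example,)/foo/ so http://example.com/foo/bar is in scope while
        # http://example.com/baz and http://sub.example.com/ are not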
        url_surt = surt.surt(url, canonicalizer=surt.GoogleURLCanonicalizer, trailing_comma=True)
        return url_surt.startswith(self.seed.surt)

    def submit(self, urls, hops_from_seed):
        for url in urls:
            if self.is_in_scope(url):
                logging.debug("{} accepted {}".format(self.seed.surt, url))
                crawl_url = CrawlUrl(url, hops_from_seed=hops_from_seed)
                self.q.schedule(crawl_url)
            else:
                logging.debug("{} rejected {}".format(self.seed.surt, url))

browsers = []
browsers_lock = threading.Lock()

# "browse" + "crawl" = "brozzle"
def brozzle_site(site, chrome_port):
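    # drive one browser through one site's queue: browse_page() returns the
    # outlinks found on the page, which feed back into the queue until
    # another thread calls abort_browse_page()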
    with umbra.Browser(chrome_port=chrome_port, chrome_exe=args.chrome_exe) as browser:
        with browsers_lock:
            browsers.append(browser)

        try:
            while True:
                crawl_url = site.q.next_url()
                logging.info("crawling {}".format(crawl_url))
                outlinks = browser.browse_page(crawl_url.url)
                site.submit(outlinks, hops_from_seed=crawl_url.hops_from_seed+1)
        except umbra.browser.BrowsingAborted:
            pass

class ShutdownRequested(Exception):
    pass

def sigterm(signum, frame):
    raise ShutdownRequested('shutdown requested (caught SIGTERM)')

def sigint(signum, frame):
    raise ShutdownRequested('shutdown requested (caught SIGINT)')

signal.signal(signal.SIGTERM, sigterm)
signal.signal(signal.SIGINT, sigint)
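
# one browsing thread and one chrome instance per seed url, on consecutive
# devtools ports starting at 9200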
chrome_port = 9200
for seed_url in args.urls:
    site = Site(seed_url)

    # pass site and chrome_port via args= instead of a lambda closure, which
    # would race with the loop rebinding those variables
    th = threading.Thread(target=brozzle_site, args=(site, chrome_port),
            name="BrowsingThread-{}".format(site.seed.surt))
    th.start()

    chrome_port += 1

try:
    while True:
        time.sleep(0.5)
except ShutdownRequested:
    pass

logging.info("shutting down browsers")

with browsers_lock:
    for browser in browsers:
        browser.abort_browse_page()

for th in threading.enumerate():
    if th != threading.current_thread():
        th.join()

logging.info("all done, exiting")