Mirror of https://github.com/internetarchive/brozzler.git, synced 2025-08-09 06:52:46 -04:00
fancier prioritization taking into account hops from seed and path depth; and clean shutdown
parent 5f3c247e0c
commit fcc63b6675

2 changed files with 69 additions and 12 deletions
@@ -10,6 +10,7 @@ import threading
 import time
 import sortedcontainers
 import surt
+import signal
 
 arg_parser = argparse.ArgumentParser(prog=os.path.basename(__file__),
         description='browse-url - open urls in chrome/chromium and run behaviors',
@@ -31,16 +32,29 @@ logging.basicConfig(stream=sys.stdout, level=args.log_level,
         format='%(asctime)s %(process)d %(levelname)s %(threadName)s %(name)s.%(funcName)s(%(filename)s:%(lineno)d) %(message)s')
 
 class CrawlUrl:
-    def __init__(self, url, priority=1):
+    def __init__(self, url, priority=None, hops_from_seed=0):
         self.url = url
+        self.hops_from_seed = hops_from_seed
         self._surt = None
-        self.set_priority(priority)
+        if priority:
+            self.set_priority(priority)
+        else:
+            self.set_priority(self.calc_priority())
 
     def set_priority(self, priority):
         # priority_key is both a sortable priority (higher value is higher
         # priority) and a unique hash key
         self.priority_key = (priority << 32) | (hash(self.surt) & (2**32 - 1))
 
+    def calc_priority(self):
+        priority = 0
+        priority += max(0, 10 - self.hops_from_seed)
+        priority += max(0, 6 - self.surt.count('/'))
+        return priority
+
+    def __repr__(self):
+        return """CrawlUrl(url="{}",priority={},hops_from_seed={})""".format(self.url, self.priority, self.hops_from_seed)
+
     @property
     def surt(self):
         if self._surt is None:
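
Note: the packed priority_key is what lets one sorted structure serve as both priority queue and uniqueness index. The priority occupies the high bits and a 32-bit hash of the SURT-canonicalized url occupies the low bits, so any key at a higher priority outranks any key at a lower one, while two urls at the same priority still get distinct keys. A minimal standalone sketch (function names mirror the diff; the example surts are made up):

def calc_priority(hops_from_seed, surt):
    # mirrors CrawlUrl.calc_priority() above
    priority = 0
    priority += max(0, 10 - hops_from_seed)   # fewer hops from seed -> higher
    priority += max(0, 6 - surt.count('/'))   # shallower path -> higher
    return priority

def priority_key(priority, surt):
    # high bits sort by priority; low 32 bits keep keys distinct per url
    return (priority << 32) | (hash(surt) & (2**32 - 1))

assert calc_priority(0, 'com,example)/') == 15         # seed: 10 + 5
assert calc_priority(3, 'com,example)/a/b/c') == 10    # 7 + 3
# priority 15 outranks priority 10 regardless of the hash in the low bits
assert priority_key(15, 'com,example)/') > priority_key(10, 'com,example)/a/b/c')
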
@@ -72,6 +86,7 @@ class CrawlUrlQueue:
 
             # XXX dumb calculation of new priority, may not belong here
             crawl_url.set_priority(crawl_url.priority + old_crawl_url.priority)
+            crawl_url.hops_from_seed = min(old_crawl_url.hops_from_seed, crawl_url.hops_from_seed)
         except KeyError:
             pass
 
@@ -83,7 +98,7 @@ class CrawlUrlQueue:
         res1 = self._urls.pop(res0.surt)
         assert res0 is res1
 
-        new_low_priority = CrawlUrl(res0.url, -1000)
+        new_low_priority = CrawlUrl(res0.url, priority=-1000, hops_from_seed=res0.hops_from_seed)
         self.schedule(new_low_priority)
 
         return res0
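
Note the -1000 reschedule: rather than dropping a url after it is crawled, next_url() re-enters it with a large negative priority, so its surt stays in the queue's dedup index. If the url is discovered again, the merge in schedule() adds the priorities, and since calc_priority() can yield at most 16 by construction, the result stays deeply negative. Illustrative arithmetic only, not committed code:

crawled_priority = -1000
rediscovered_priority = 15    # near the ceiling of what calc_priority() yields
merged = crawled_priority + rediscovered_priority
assert merged == -985         # still far below any never-crawled url
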
@@ -100,21 +115,43 @@ class Site:
         surtt = surt.surt(url, canonicalizer=surt.GoogleURLCanonicalizer, trailing_comma=True)
         return surtt.startswith(self.seed.surt)
 
-    def submit(self, urls):
+    def submit(self, urls, hops_from_seed):
         for url in urls:
             if self.is_in_scope(url):
-                logging.info("{} accepted {}".format(self.seed.surt, url))
-                self.q.schedule(CrawlUrl(url))
+                logging.debug("{} accepted {}".format(self.seed.surt, url))
+                crawl_url = CrawlUrl(url, hops_from_seed=hops_from_seed)
+                self.q.schedule(crawl_url)
             else:
-                logging.info("{} rejected {}".format(self.seed.surt, url))
+                logging.debug("{} rejected {}".format(self.seed.surt, url))
 
+browsers = []
+browsers_lock = threading.Lock()
+
 # "browse" + "crawl" = "brozzle"
 def brozzle_site(site, chrome_port):
     with umbra.Browser(chrome_port=chrome_port, chrome_exe=args.chrome_exe) as browser:
-        while True:
-            crawl_url = site.q.next_url()
-            outlinks = browser.browse_page(crawl_url.url)
-            site.submit(outlinks)
+        with browsers_lock:
+            browsers.append(browser)
+
+        try:
+            while True:
+                crawl_url = site.q.next_url()
+                logging.info("crawling {}".format(crawl_url))
+                outlinks = browser.browse_page(crawl_url.url)
+                site.submit(outlinks, hops_from_seed=crawl_url.hops_from_seed+1)
+        except umbra.browser.BrowsingAborted:
+            pass
+
+class ShutdownRequested(Exception):
+    pass
+
+def sigterm(signum, frame):
+    raise ShutdownRequested('shutdown requested (caught SIGTERM)')
+def sigint(signum, frame):
+    raise ShutdownRequested('shutdown requested (caught SIGINT)')
+
+signal.signal(signal.SIGTERM, sigterm)
+signal.signal(signal.SIGINT, sigint)
 
 chrome_port = 9200
 for seed_url in args.urls:
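
Since each outlink is submitted with hops_from_seed one greater than its parent, the hops component of priority decays by one per level and bottoms out ten hops from the seed. A quick illustration (hypothetical helper, not in the diff):

def hops_component(hops_from_seed):
    return max(0, 10 - hops_from_seed)

assert [hops_component(h) for h in range(12)] == \
        [10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 0]
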
@@ -126,3 +163,20 @@ for seed_url in args.urls:
 
     chrome_port += 1
 
+try:
+    while True:
+        time.sleep(0.5)
+except ShutdownRequested as e:
+    pass
+
+logging.info("shutting down browsers")
+
+with browsers_lock:
+    for browser in browsers:
+        browser.abort_browse_page()
+
+for th in threading.enumerate():
+    if th != threading.current_thread():
+        th.join()
+
+logging.info("all done, exiting")
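
The clean-shutdown choreography relies on CPython delivering signal handlers in the main thread: the handler raises ShutdownRequested out of the sleep loop, the main thread then aborts each browser (which surfaces as BrowsingAborted in the worker threads), and finally joins everything. A self-contained sketch of the same pattern, with a threading.Event as a hypothetical stand-in for Browser.abort_browse_page():

import signal
import threading
import time

class ShutdownRequested(Exception):
    pass

def on_signal(signum, frame):
    raise ShutdownRequested('caught signal {}'.format(signum))

signal.signal(signal.SIGTERM, on_signal)
signal.signal(signal.SIGINT, on_signal)

stop = threading.Event()    # stand-in for Browser.abort_browse_page()

def worker():
    while not stop.is_set():    # a real worker blocks inside browse_page()
        time.sleep(0.1)

threading.Thread(target=worker).start()

try:
    while True:
        time.sleep(0.5)    # the handler raises here, in the main thread
except ShutdownRequested:
    pass

stop.set()    # tell workers to unwind, as abort_browse_page() does
for th in threading.enumerate():
    if th != threading.current_thread():
        th.join()
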
@@ -53,6 +53,9 @@ class BrowserPool:
 class BrowsingException(Exception):
     pass
 
+class BrowsingAborted(BrowsingException):
+    pass
+
 class Browser:
     """Runs chrome/chromium to synchronously browse one page at a time using
     worker.browse_page(). Currently the implementation starts up a new instance

@@ -162,7 +165,7 @@ class Browser:
             self.logger.info("finished browsing page, reached hard timeout of {} seconds url={}".format(Browser.HARD_TIMEOUT_SECONDS, self.url))
             return True
         elif self._abort_browse_page:
-            raise BrowsingException("browsing page aborted")
+            raise BrowsingAborted("browsing page aborted")
 
     def send_to_chrome(self, suppress_logging=False, **kwargs):
         msg_id = next(self.command_id)
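
Splitting BrowsingAborted out of BrowsingException is what lets brozzle_site() above swallow only deliberate aborts while real browsing failures still propagate. A minimal sketch of the call-site pattern (the browse callable is a hypothetical stand-in for browser.browse_page):

class BrowsingException(Exception):
    pass

class BrowsingAborted(BrowsingException):
    pass

def crawl_loop(browse):
    try:
        while True:
            browse()
    except BrowsingAborted:
        pass    # expected during shutdown; other BrowsingExceptions propagate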