Mirror of https://github.com/internetarchive/brozzler.git (synced 2025-08-08 14:32:23 -04:00)
brozzler-worker round-robins sites needing crawling
commit eb74967fed (parent ddd764cac5)
2 changed files with 56 additions and 47 deletions
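Instead of spawning a new browser per site (tracked by the module-level browsers set, browsers_lock, num_browsers, and an incrementing chrome_port), the worker now builds a fixed-size BrowserPool up front. The main loop claims a site from the brozzler.sites.unclaimed queue only when it can acquire a browser from the pool; brozzle_site(site, browser) browses the site for at most 60 seconds, after which disclaim_site() puts the site back on the unclaimed queue and the browser goes back to the pool. The effect is that sites needing crawling are round-robined across a bounded set of browsers.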
File 1 (the brozzler-worker script):

@@ -35,10 +35,6 @@ args = arg_parser.parse_args(args=sys.argv[1:])
 logging.basicConfig(stream=sys.stdout, level=args.log_level,
         format='%(asctime)s %(process)d %(levelname)s %(threadName)s %(name)s.%(funcName)s(%(filename)s:%(lineno)d) %(message)s')
 
-browsers = set()
-browsers_lock = threading.Lock()
-num_browsers = 0
-
 shutdown_requested = threading.Event()
 
 def next_url(site):
@@ -51,34 +47,39 @@ def next_url(site):
     msg.ack()
     return crawl_url
 
-def completed(site, crawl_url):
+def completed_url(site, crawl_url):
     with kombu.Connection(args.amqp_url) as conn:
         q = conn.SimpleQueue("brozzler.sites.{}.completed_urls".format(site.id))
         logging.info("putting {} on queue {}".format(crawl_url, q.queue.name))
         q.put(crawl_url.to_dict())
 
-def brozzle_site(site, chrome_port):
-    with umbra.Browser(chrome_port=chrome_port, chrome_exe=args.chrome_exe,
-            proxy_server=args.proxy_server, ignore_cert_errors=args.ignore_cert_errors) as browser:
-        with browsers_lock:
-            browsers.add(browser)
-        try:
-            while not shutdown_requested.is_set():
+def disclaim_site(site):
+    # XXX maybe should put on "disclaimed" queue and hq should put back on "unclaimed"
+    with kombu.Connection(args.amqp_url) as conn:
+        q = conn.SimpleQueue("brozzler.sites.unclaimed".format(site.id))
+        logging.info("putting {} on queue {}".format(site, q.queue.name))
+        q.put(site.to_dict())
+
+def brozzle_site(site, browser):
+    start = time.time()
+    try:
+        with browser:
+            while not shutdown_requested.is_set() and time.time() - start < 60:
                 try:
                     crawl_url = next_url(site)
                     logging.info("crawling {}".format(crawl_url))
                     crawl_url.outlinks = browser.browse_page(crawl_url.url)
-                    completed(site, crawl_url)
+                    completed_url(site, crawl_url)
                 except kombu.simple.Empty:
                     # if some timeout reached, re-raise?
                     pass
 # except kombu.simple.Empty:
 #     logging.info("finished {} (queue is empty)".format(site))
-        except umbra.browser.BrowsingAborted:
-            logging.info("{} shut down")
-        finally:
-            with browsers_lock:
-                browsers.remove(browser)
+    except umbra.browser.BrowsingAborted:
+        logging.info("{} shut down".format(browser))
+    finally:
+        disclaim_site(site)
+        browser_pool.release(browser)
 
 class ShutdownRequested(Exception):
     pass
@@ -91,39 +92,46 @@ def sigint(signum, frame):
 signal.signal(signal.SIGTERM, sigterm)
 signal.signal(signal.SIGINT, sigint)
 
+browser_pool = umbra.browser.BrowserPool(int(args.max_browsers),
+        chrome_exe=args.chrome_exe, proxy_server=args.proxy_server,
+        ignore_cert_errors=args.ignore_cert_errors)
+
 latest_state = None
-chrome_port = 9200
 try:
     while True:
-        if num_browsers < int(args.max_browsers):
-            with kombu.Connection(args.amqp_url) as conn:
-                q = conn.SimpleQueue("brozzler.sites.unclaimed")
+        with kombu.Connection(args.amqp_url) as conn:
+            q = conn.SimpleQueue("brozzler.sites.unclaimed")
+            q_empty = False
+            if len(q) > 0:
                 try:
-                    msg = q.get(block=True, timeout=0.5)
-                    site = hq.Site(**msg.payload)
-                    logging.info("browsing site {}".format(site))
-                    num_browsers += 1
-                    msg.ack()
-                    th = threading.Thread(target=lambda: brozzle_site(site, chrome_port),
-                            name="BrowsingThread-{}".format(site.scope_surt))
-                    th.start()
-                    chrome_port += 1
-                except kombu.simple.Empty:
-                    if latest_state != "no-unclaimed-sites":
-                        logging.info("no unclaimed sites to browse")
-                        latest_state = "no-unclaimed-sites"
-        else:
-            if latest_state != "browsers-busy":
-                logging.info("all {} browsers are busy, not looking for unclaimed sites".format(args.max_browsers))
-                latest_state = "browsers-busy"
-        time.sleep(0.5)
+                    browser = browser_pool.acquire()
+                    try:
+                        msg = q.get(block=True, timeout=0.5)
+                        site = hq.Site(**msg.payload)
+                        msg.ack()
+                        logging.info("browsing site {}".format(site))
+                        th = threading.Thread(target=lambda: brozzle_site(site, browser),
+                                name="BrowsingThread-{}".format(site.scope_surt))
+                        th.start()
+                    except kombu.simple.Empty:
+                        q_empty = True
+                except KeyError:
+                    if latest_state != "browsers-busy":
+                        logging.info("all {} browsers are busy".format(args.max_browsers))
+                        latest_state = "browsers-busy"
+            else:
+                q_empty = True
+
+        if q_empty:
+            if latest_state != "no-unclaimed-sites":
+                logging.info("no unclaimed sites to browse")
+                latest_state = "no-unclaimed-sites"
+        time.sleep(0.5)
 except ShutdownRequested as e:
     logging.info("shutting down browsers")
     shutdown_requested.set()
 
-    with browsers_lock:
-        for browser in browsers:
-            browser.abort_browse_page()
+    browser_pool.shutdown_now()
 
     for th in threading.enumerate():
         if th != threading.current_thread():
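The claim/disclaim cycle above rides on kombu's SimpleQueue interface. Below is a minimal sketch of that round trip, runnable against a local RabbitMQ; the AMQP URL and the site dict are made-up placeholders standing in for args.amqp_url and hq.Site:

import kombu

amqp_url = "amqp://guest:guest@localhost:5672/%2f"    # placeholder; the worker uses args.amqp_url
with kombu.Connection(amqp_url) as conn:
    q = conn.SimpleQueue("brozzler.sites.unclaimed")  # queue name from the diff
    q.put({"id": 1, "scope_surt": "com,example,)/"})  # disclaim_site() puts site.to_dict() like this
    msg = q.get(block=True, timeout=0.5)              # the main loop's claim step
    site_dict = msg.payload                           # the worker builds hq.Site(**msg.payload)
    msg.ack()                                         # ack removes the site from the queue
    q.close()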
File 2 (class BrowserPool, in umbra's browser module):

@@ -22,12 +22,13 @@ class BrowserPool:
 
     BASE_PORT = 9200
 
-    def __init__(self, size=3, chrome_exe='chromium-browser'):
+    def __init__(self, size=3, **kwargs):
+        """kwargs are passed on to Browser.__init__"""
         self._available = set()
         self._in_use = set()
 
         for i in range(0, size):
-            browser = Browser(BrowserPool.BASE_PORT + i, chrome_exe)
+            browser = Browser(BrowserPool.BASE_PORT + i, **kwargs)
             self._available.add(browser)
 
         self._lock = threading.Lock()
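The worker calls browser_pool.acquire(), browser_pool.release(browser), and browser_pool.shutdown_now(), none of which appear in the hunk above. A hypothetical sketch of those methods, assuming acquire() pops from the _available set so that an exhausted pool raises the KeyError the main loop treats as "all browsers busy":

    # hypothetical sketch, not shown in this commit's hunks
    def acquire(self):
        with self._lock:
            browser = self._available.pop()    # KeyError here means every browser is in use
            self._in_use.add(browser)
            return browser

    def release(self, browser):
        with self._lock:
            self._in_use.remove(browser)
            self._available.add(browser)

    def shutdown_now(self):
        with self._lock:
            for browser in self._in_use:
                browser.abort_browse_page()    # the call the worker's shutdown path used to make directly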