#!/usr/bin/env python
# vim: set sw=4 et:
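
"""
crawl-url - browse urls, follow links

Brozzler worker: claims sites from amqp queues, browses their urls in
chrome/chromium instances driven by umbra, and reports completed urls (with
any discovered outlinks) back to the queues.
"""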

import argparse
import os
import sys
import logging
import threading
import time
import signal
import traceback

import kombu
import youtube_dl

import umbra
import umbra.browser
from umbra import hq

arg_parser = argparse.ArgumentParser(prog=os.path.basename(__file__),
        description='crawl-url - browse urls, follow links',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
arg_parser.add_argument('-u', '--url', dest='amqp_url',
        default='amqp://guest:guest@localhost:5672/%2f',
        help='URL identifying the amqp server to talk to')
arg_parser.add_argument('-e', '--executable', dest='chrome_exe',
        default='chromium-browser',
        help='executable to use to invoke chrome')
arg_parser.add_argument('-n', '--max-browsers', dest='max_browsers',
        type=int, default=1,
        help='max number of chrome instances simultaneously browsing pages')
arg_parser.add_argument('--proxy-server', dest='proxy_server', default=None,
        help='configure browser to use specified proxy server')
arg_parser.add_argument('--ignore-certificate-errors', dest='ignore_cert_errors',
        action='store_true', help='configure browser to ignore certificate errors')
arg_parser.add_argument('-v', '--verbose', dest='log_level',
        action="store_const", default=logging.INFO, const=logging.DEBUG)
arg_parser.add_argument('--version', action='version',
        version="umbra {} - {}".format(umbra.version, os.path.basename(__file__)))
args = arg_parser.parse_args(args=sys.argv[1:])
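
# Example invocation (hypothetical host and values):
#
#   crawl-url -n 3 --proxy-server localhost:8000 \
#           -u amqp://guest:guest@rabbit.example.com:5672/%2f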

logging.basicConfig(stream=sys.stdout, level=args.log_level,
        format='%(asctime)s %(process)d %(levelname)s %(threadName)s %(name)s.%(funcName)s(%(filename)s:%(lineno)d) %(message)s')

# set when graceful shutdown has been requested; browsing threads poll it so
# they can finish up
shutdown_requested = threading.Event()

def next_url(site):
    """Pops the next url to crawl for the site from its amqp queue.
    Raises kombu.simple.Empty if queue is empty."""
    with kombu.Connection(args.amqp_url) as conn:
        q = conn.SimpleQueue("brozzler.sites.{}.crawl_urls".format(site.id))
        msg = q.get(block=True, timeout=0.5)
        crawl_url_dict = msg.payload
        crawl_url = umbra.CrawlUrl(**crawl_url_dict)
        msg.ack()
        return crawl_url

def completed_url(site, crawl_url):
    """Reports a browsed url, along with any outlinks discovered, to the
    site's completed_urls queue."""
    with kombu.Connection(args.amqp_url) as conn:
        q = conn.SimpleQueue("brozzler.sites.{}.completed_urls".format(site.id))
        logging.info("putting {} on queue {}".format(crawl_url, q.queue.name))
        q.put(crawl_url.to_dict())

def disclaim_site(site):
    """Puts the site back on the "unclaimed" queue so that this or another
    worker can claim it again later."""
    # XXX maybe should put on "disclaimed" queue and hq should put back on
    # "unclaimed"
    with kombu.Connection(args.amqp_url) as conn:
        q = conn.SimpleQueue("brozzler.sites.unclaimed")
        logging.info("putting {} on queue {}".format(site, q.queue.name))
        q.put(site.to_dict())
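
# For testing without hq running, a site can be enqueued by hand. A minimal
# sketch, assuming hq.Site's constructor accepts the "id" and "scope_surt"
# fields this script relies on:
#
#   with kombu.Connection(args.amqp_url) as conn:
#       conn.SimpleQueue("brozzler.sites.unclaimed").put(
#               {"id": 0, "scope_surt": "http://(com,example,)/"})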

ydl_extractors = youtube_dl.extractor.gen_extractors()

def ydl_suitable(url):
    """Returns true if any youtube-dl extractor reports itself suitable for
    the url."""
    for ie in ydl_extractors:
        if ie.suitable(url):
            return True
    return False

def brozzle_site(site, browser):
    """Pulls urls from the site's queue and browses them in the supplied
    browser, for up to 60 seconds or until shutdown is requested. Runs in a
    browsing thread; always disclaims the site and releases the browser on
    the way out."""
    start = time.time()
    try:
        with browser:
            while not shutdown_requested.is_set() and time.time() - start < 60:
                try:
                    crawl_url = next_url(site)
                    logging.info("crawling {}".format(crawl_url))
                    if ydl_suitable(crawl_url.url):
                        logging.info("youtube-dl suitable for {}".format(crawl_url))
                    crawl_url.outlinks = browser.browse_page(crawl_url.url)
                    completed_url(site, crawl_url)
                except kombu.simple.Empty:
                    # if some timeout reached, re-raise?
                    pass
    except umbra.browser.BrowsingAborted:
        logging.info("{} shut down".format(browser))
    finally:
        disclaim_site(site)
        browser_pool.release(browser)

class ShutdownRequested(Exception):
    pass

def sigterm(signum, frame):
    raise ShutdownRequested('shutdown requested (caught SIGTERM)')

def sigint(signum, frame):
    raise ShutdownRequested('shutdown requested (caught SIGINT)')

def dump_state(signum, frame):
    """Logs the current stack of every thread, as a debugging aid."""
    state_strs = []

    for th in threading.enumerate():
        state_strs.append(str(th))
        stack = traceback.format_stack(sys._current_frames()[th.ident])
        state_strs.append("".join(stack))

    logging.warning("dumping state (caught signal {})\n{}".format(signum, "\n".join(state_strs)))
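
# SIGQUIT (e.g. `kill -QUIT <pid>`) dumps thread stacks without shutting
# down; SIGTERM and SIGINT (ctrl-c) request a graceful shutdown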
signal.signal(signal.SIGQUIT, dump_state)
signal.signal(signal.SIGTERM, sigterm)
signal.signal(signal.SIGINT, sigint)

browser_pool = umbra.browser.BrowserPool(args.max_browsers,
        chrome_exe=args.chrome_exe, proxy_server=args.proxy_server,
        ignore_cert_errors=args.ignore_cert_errors)

latest_state = None
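
# main loop: claim an unclaimed site and a free browser, and hand them to a
# new browsing thread; latest_state above ensures "browsers busy" and "no
# unclaimed sites" are logged only on transitions, not every 0.5s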
try:
    while True:
        with kombu.Connection(args.amqp_url) as conn:
            q = conn.SimpleQueue("brozzler.sites.unclaimed")
            q_empty = False
            if len(q) > 0:
                try:
                    browser = browser_pool.acquire()
                    try:
                        msg = q.get(block=True, timeout=0.5)
                        site = hq.Site(**msg.payload)
                        msg.ack()
                        logging.info("browsing site {}".format(site))
                        th = threading.Thread(target=lambda: brozzle_site(site, browser),
                                name="BrowsingThread-{}".format(site.scope_surt))
                        th.start()
                    except kombu.simple.Empty:
                        # another worker claimed the site in the meantime;
                        # put the browser back so it isn't leaked
                        browser_pool.release(browser)
                        q_empty = True
                except KeyError:
                    # BrowserPool.acquire() raises KeyError when all browsers
                    # are checked out
                    if latest_state != "browsers-busy":
                        logging.info("all {} browsers are busy".format(args.max_browsers))
                        latest_state = "browsers-busy"
            else:
                q_empty = True

        if q_empty:
            if latest_state != "no-unclaimed-sites":
                logging.info("no unclaimed sites to browse")
                latest_state = "no-unclaimed-sites"
        time.sleep(0.5)
except ShutdownRequested:
    logging.info("shutting down browsers")
    shutdown_requested.set()

browser_pool.shutdown_now()

for th in threading.enumerate():
    if th != threading.current_thread():
        th.join()

logging.info("all done, exiting")