#!/usr/bin/env python
# vim: set sw=4 et:

import logging
import time
import threading
import kombu
import socket
from umbra.browser import BrowserPool, BrowsingException


class AmqpBrowserController:
    """
    Consumes amqp messages representing requests to browse urls, from the
    specified amqp queue (default: "urls") on the specified amqp exchange
    (default: "umbra"). The incoming amqp message is a json object with 3
    attributes:

        {
            "clientId": "umbra.client.123",
            "url": "http://example.com/my_fancy_page",
            "metadata": {"arbitrary":"fields", "etc":4}
        }

    "url" is the url to browse.

    "clientId" uniquely identifies the client of umbra. Umbra uses the clientId
    as the amqp routing key, to direct information back to the client via amqp.
    It sends this information on the same specified amqp exchange (default:
    "umbra").

    Each url requested in the browser is published to amqp this way. The
    outgoing amqp message is a json object:

        {
            "url": "http://example.com/images/embedded_thing.jpg",
            "method": "GET",
            "headers": {"User-Agent": "...", "Accept": "...", ...},
            "parentUrl": "http://example.com/my_fancy_page",
            "parentUrlMetadata": {"arbitrary":"fields", "etc":4, ...}
        }

    POST requests have an additional field, postData.
    """
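
    # Illustrative sketch (assumed client-side usage, not part of this class):
    # a client that submits requests with clientId "umbra.client.123" could
    # receive the outgoing messages described above by binding a queue to the
    # same exchange with its clientId as the routing key, roughly:
    #
    #     import kombu
    #     with kombu.Connection('amqp://guest:guest@localhost:5672/%2f') as conn:
    #         exchange = kombu.Exchange('umbra', 'direct', durable=True)
    #         queue = kombu.Queue('umbra.client.123', exchange=exchange,
    #                 routing_key='umbra.client.123')
    #         def on_message(body, message):
    #             print(body['url'], body['method'])
    #             message.ack()
    #         with conn.Consumer(queue, callbacks=[on_message]):
    #             conn.drain_events(timeout=30)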

    logger = logging.getLogger(__module__ + "." + __qualname__)

    def __init__(self, amqp_url='amqp://guest:guest@localhost:5672/%2f',
            chrome_exe='chromium-browser', max_active_browsers=1,
            queue_name='urls', exchange_name='umbra'):
        self.amqp_url = amqp_url
        self.queue_name = queue_name
        self.exchange_name = exchange_name
        self.max_active_browsers = max_active_browsers

        self._browser_pool = BrowserPool(size=max_active_browsers, chrome_exe=chrome_exe)

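    # Threading model (as implemented below): start() launches a single
    # consumer thread ("AmqpConsumerThread") that pulls browse requests off
    # the amqp queue; each page browse then runs in its own short-lived
    # browsing thread, and all of them share one amqp producer connection
    # guarded by self._producer_lock.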
    def start(self):
        self._browsing_threads = set()
        self._browsing_threads_lock = threading.Lock()

        self._exchange = kombu.Exchange(name=self.exchange_name, type='direct',
                durable=True)

        self._reconnect_requested = False

        self._producer = None
        self._producer_lock = threading.Lock()
        with self._producer_lock:
            self._producer_conn = kombu.Connection(self.amqp_url)
            self._producer = self._producer_conn.Producer(serializer='json')

        self._consumer_thread = threading.Thread(target=self._consume_amqp, name='AmqpConsumerThread')
        self._consumer_stop = threading.Event()
        self._consumer_thread.start()

    def shutdown(self):
        self.logger.info("shutting down amqp consumer {}".format(self.amqp_url))
        self._consumer_stop.set()
        self._consumer_thread.join()

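    # like shutdown(), but additionally tears down the browser pool right
    # away instead of letting in-progress page browses finish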
    def shutdown_now(self):
        self._consumer_stop.set()
        self._browser_pool.shutdown_now()
        self._consumer_thread.join()

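    # Requests that the amqp connection be torn down and re-established.
    # Accepts and ignores any arguments, which makes it easy to wire up as
    # e.g. a signal handler (an assumption about intended usage, not something
    # this module does itself).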
    def reconnect(self, *args, **kwargs):
        self._reconnect_requested = True
        self._browser_pool.shutdown_now()

    def _wait_for_and_browse_urls(self, conn, consumer, timeout):
        start = time.time()
        browser = None
        consumer.qos(prefetch_count=self.max_active_browsers)

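        # Core dispatch loop: acquire an idle browser from the pool, register
        # a callback that hands an incoming url off to that browser, then
        # drain_events() until a message arrives (or it is time to stop). The
        # qos prefetch above keeps amqp from delivering more unacked urls than
        # there are browsers.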
        while not self._consumer_stop.is_set() and time.time() - start < timeout and not self._reconnect_requested:
            try:
                browser = self._browser_pool.acquire() # raises KeyError if none available
                browser.start()

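                # kombu calls this for each message delivered while we are
                # draining events below: bad messages are rejected and the
                # browser is returned to the pool; good ones kick off a
                # browsing thread via _start_browsing_page()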
                def callback(body, message):
                    try:
                        client_id, url, metadata = body['clientId'], body['url'], body['metadata']
                    except:
                        self.logger.error("unable to decipher message {}".format(message), exc_info=True)
                        self.logger.error("discarding bad message")
                        message.reject()
                        browser.stop()
                        self._browser_pool.release(browser)
                        return
                    self._start_browsing_page(browser, message, client_id, url, metadata)

                consumer.callbacks = [callback]

                while True:
                    try:
                        conn.drain_events(timeout=0.5)
                        break # out of "while True" to acquire another browser
                    except socket.timeout:
                        pass
                    except socket.error:
                        self.logger.error("problem consuming messages from AMQP, will try reconnecting after active browsing finishes", exc_info=True)
                        self._reconnect_requested = True

                    if self._consumer_stop.is_set() or time.time() - start >= timeout or self._reconnect_requested:
                        browser.stop()
                        self._browser_pool.release(browser)
                        break

            except KeyError:
                # no browsers available
                time.sleep(0.5)
            except:
                self.logger.critical("problem with browser initialization", exc_info=True)
                time.sleep(0.5)
            finally:
                consumer.callbacks = None

    def _wait_for_active_browsers(self):
        self.logger.info("waiting for browsing threads to finish")
        while True:
            with self._browsing_threads_lock:
                if len(self._browsing_threads) == 0:
                    break
            time.sleep(0.5)
        self.logger.info("active browsing threads finished")

    def _consume_amqp(self):
        # XXX https://webarchive.jira.com/browse/ARI-3811
        # After running for some amount of time (3 weeks in the latest case),
        # consumer looks normal but doesn't consume any messages. Not clear if
        # it's hanging in drain_events() or not. As a temporary measure for
        # mitigation (if it works) or debugging (if it doesn't work), close and
        # reopen the connection every 2.5 hours
        RECONNECT_AFTER_SECONDS = 150 * 60

        url_queue = kombu.Queue(self.queue_name, exchange=self._exchange)

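        # reconnect whenever the timeout above expires, a reconnect has been
        # requested, or the connection fails for any reason, until shutdown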
        while not self._consumer_stop.is_set():
            try:
                self.logger.info("connecting to amqp exchange={} at {}".format(self._exchange.name, self.amqp_url))
                self._reconnect_requested = False
                with kombu.Connection(self.amqp_url) as conn:
                    with conn.Consumer(url_queue) as consumer:
                        self._wait_for_and_browse_urls(conn, consumer, timeout=RECONNECT_AFTER_SECONDS)

                    # need to wait for browsers to finish here, before closing
                    # the amqp connection, because they use it to do
                    # message.ack() after they finish browsing a page
                    self._wait_for_active_browsers()
            except BaseException as e:
                self.logger.error("caught exception {}".format(e), exc_info=True)
                time.sleep(0.5)
                self.logger.error("attempting to reopen amqp connection")

    def _start_browsing_page(self, browser, message, client_id, url, parent_url_metadata):
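        # chrome reports each request the page makes (relayed to us by
        # umbra.browser); every one is annotated with the parent url and its
        # metadata and published back to the requesting client's routing key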
        def on_request(chrome_msg):
            payload = chrome_msg['params']['request']
            payload['parentUrl'] = url
            payload['parentUrlMetadata'] = parent_url_metadata
            self.logger.debug('sending to amqp exchange={} routing_key={} payload={}'.format(self.exchange_name, client_id, payload))
            with self._producer_lock:
                publish = self._producer_conn.ensure(self._producer, self._producer.publish)
                publish(payload, exchange=self._exchange, routing_key=client_id)

        def browse_page_sync():
            self.logger.info('browser={} client_id={} url={}'.format(browser, client_id, url))
            try:
                browser.browse_page(url, on_request=on_request)
                message.ack()
            except BrowsingException as e:
                self.logger.warning("browsing did not complete normally, requeuing url {} - {}".format(url, e))
                message.requeue()
            except:
                self.logger.critical("problem browsing page, requeuing url {}, may have lost browser process".format(url), exc_info=True)
                message.requeue()
            finally:
                browser.stop()
                self._browser_pool.release(browser)

        def browse_thread_run_then_cleanup():
            browse_page_sync()
            with self._browsing_threads_lock:
                self._browsing_threads.remove(threading.current_thread())

        import random
        thread_name = "BrowsingThread{}-{}".format(browser.chrome_port,
                ''.join((random.choice('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789') for _ in range(6))))
        th = threading.Thread(target=browse_thread_run_then_cleanup, name=thread_name)
        with self._browsing_threads_lock:
            self._browsing_threads.add(th)
        th.start()