new script brozzler-new-job to queue a new job with brozzler based on a yaml configuration file
This commit is contained in:
parent e6eeca6ae2
commit b6beac3807
bin/brozzler-new-job | 77 (new executable file)
@@ -0,0 +1,77 @@
#!/usr/bin/env python
# vim: set sw=4 et:

import argparse
import os
import sys
import logging
import brozzler
import kombu
import yaml
import json

arg_parser = argparse.ArgumentParser(prog=os.path.basename(__file__),
        description="brozzler-new-job - queue new job with brozzler",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
arg_parser.add_argument('job_conf_file', metavar='JOB_CONF_FILE', help='brozzler job configuration file in yaml')
arg_parser.add_argument('-u', '--amqp-url', dest='amqp_url', default='amqp://guest:guest@localhost:5672/%2f',
        help='URL identifying the amqp server to talk to')
arg_parser.add_argument("-v", "--verbose", dest="log_level",
        action="store_const", default=logging.INFO, const=logging.DEBUG)
arg_parser.add_argument("--version", action="version",
        version="brozzler {} - {}".format(brozzler.version, os.path.basename(__file__)))
args = arg_parser.parse_args(args=sys.argv[1:])

logging.basicConfig(stream=sys.stdout, level=args.log_level,
        format="%(asctime)s %(process)d %(levelname)s %(threadName)s %(name)s.%(funcName)s(%(filename)s:%(lineno)d) %(message)s")

def merge(a, b):
    if isinstance(a, dict) and isinstance(b, dict):
        merged = dict(a)
        b_tmp = dict(b)
        for k in a:
            merged[k] = merge(a[k], b_tmp.pop(k, None))
        merged.update(b_tmp)
        return merged
    elif isinstance(a, list) and isinstance(b, list):
        return a + b
    else:
        return a

logging.info("loading %s", args.job_conf_file)
with open(args.job_conf_file) as f:
    job_conf = yaml.load(f)
    logging.info("job_conf=%s", job_conf)

seeds = job_conf.pop("seeds")
# logging.info("=== global settings ===\n%s", yaml.dump(job_conf))

sites = []
for seed_conf in seeds:
    if "id" in seed_conf:
        seed_conf.pop("id")
    merged_conf = merge(seed_conf, job_conf)
    # XXX check for unknown settings, invalid url, etc
    # logging.info("merge(%s, %s) = %s", seed_conf, global_conf, merged_conf)
    # logging.info("=== seed_conf ===\n%s", yaml.dump(seed_conf))
    # logging.info("=== merged_conf ===\n%s", yaml.dump(merged_conf))

    extra_headers = None
    if "warcprox_meta" in merged_conf:
        warcprox_meta = json.dumps(merged_conf["warcprox_meta"], separators=(',', ':'))
        extra_headers = {"Warcprox-Meta":warcprox_meta}
    site = brozzler.Site(seed=merged_conf["url"],
            scope=merged_conf.get("scope"),
            proxy=merged_conf.get("proxy"),
            ignore_robots=merged_conf.get("ignore_robots"),
            enable_warcprox_features=merged_conf.get("enable_warcprox_features"),
            extra_headers=extra_headers)
    sites.append(site)

with kombu.Connection(args.amqp_url) as conn:
    q = conn.SimpleQueue("brozzler.sites.new")
    for site in sites:
        d = site.to_dict()
        logging.info("feeding amqp queue %s with %s", repr(q.queue.name), repr(d))
        q.put(d)
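For orientation, here is a minimal sketch of the kind of yaml job configuration file the new script expects. Only keys that the code above actually reads are shown (seeds, url, scope, proxy, ignore_robots, enable_warcprox_features, warcprox_meta); the specific values, the warc-prefix entry, and the example surt string are illustrative placeholders, not part of this commit.

proxy: localhost:8000            # illustrative; passed through to brozzler.Site(proxy=...)
enable_warcprox_features: true
ignore_robots: false
warcprox_meta:
  warc-prefix: example-job       # hypothetical key; serialized into the Warcprox-Meta header
seeds:
  - url: http://example.com/
  - url: http://example.org/
    scope:
      surt: http://(org,example,)/   # assumed surt form, for illustration only

Given such a file saved as, say, job.yaml, running brozzler-new-job job.yaml (optionally with -u to point at a non-default amqp server) merges the top-level settings into each seed, with per-seed values taking precedence, and puts one message per seed on the brozzler.sites.new queue.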
@@ -161,7 +161,7 @@ class BrozzlerHQ:
         if res.status_code == 420 and 'warcprox-meta' in res.headers:
             raise brozzler.ReachedLimit(warcprox_meta=json.loads(res.headers['warcprox-meta']), http_payload=res.text)
         else:
-            return response
+            return res
 
     if not site.id in self._robots_caches:
         req_sesh = SessionRaiseOn420()
@@ -8,34 +8,32 @@ import brozzler
 class Site:
     logger = logging.getLogger(__module__ + "." + __qualname__)
 
-    def __init__(self, seed, id=None, scope_surt=None, proxy=None,
-            ignore_robots=False, time_limit=None, extra_headers=None,
+    def __init__(self, seed, id=None, scope=None, proxy=None,
+            ignore_robots=False, extra_headers=None,
             enable_warcprox_features=False, reached_limit=None):
         self.seed = seed
         self.id = id
         self.proxy = proxy
         self.ignore_robots = ignore_robots
-        self.enable_warcprox_features = enable_warcprox_features
-        self.time_limit = time_limit
+        self.enable_warcprox_features = bool(enable_warcprox_features)
         self.extra_headers = extra_headers
         self.reached_limit = reached_limit
 
-        if scope_surt:
-            self.scope_surt = scope_surt
-        else:
-            self.scope_surt = surt.GoogleURLCanonicalizer.canonicalize(surt.handyurl.parse(seed)).getURLString(surt=True, trailing_comma=True)
+        self.scope = scope or {}
+        if not "surt" in scope:
+            self.scope["surt"] = surt.GoogleURLCanonicalizer.canonicalize(surt.handyurl.parse(seed)).getURLString(surt=True, trailing_comma=True)
 
     def __repr__(self):
-        return """Site(id={},seed={},scope_surt={},proxy={},enable_warcprox_features={},ignore_robots={},extra_headers={},reached_limit={})""".format(
-                self.id, repr(self.seed), repr(self.scope_surt),
+        return """Site(id={},seed={},scope={},proxy={},enable_warcprox_features={},ignore_robots={},extra_headers={},reached_limit={})""".format(
+                self.id, repr(self.seed), repr(self.scope),
                 repr(self.proxy), self.enable_warcprox_features,
                 self.ignore_robots, self.extra_headers, self.reached_limit)
 
     def note_seed_redirect(self, url):
         new_scope_surt = surt.GoogleURLCanonicalizer.canonicalize(surt.handyurl.parse(url)).getURLString(surt=True, trailing_comma=True)
-        if not new_scope_surt.startswith(self.scope_surt):
-            self.logger.info("changing site scope surt from {} to {}".format(self.scope_surt, new_scope_surt))
-            self.scope_surt = new_scope_surt
+        if not new_scope_surt.startswith(self.scope["surt"]):
+            self.logger.info("changing site scope surt from {} to {}".format(self.scope["surt"], new_scope_surt))
+            self.scope["surt"] = new_scope_surt
 
     def note_limit_reached(self, e):
         self.logger.info("reached_limit e=%s", e)
@@ -55,7 +53,7 @@ class Site:
                 return False
 
             surtt = surt.GoogleURLCanonicalizer.canonicalize(hurl).getURLString(surt=True, trailing_comma=True)
-            return surtt.startswith(self.scope_surt)
+            return surtt.startswith(self.scope["surt"])
         except:
             self.logger.warn("""problem parsing url "{}" """.format(url))
             return False