Merge branch 'metrics_plus_proxy_retries' into qa

Barbara Miller 2024-09-24 09:13:10 -07:00
commit eb06474bed
6 changed files with 75 additions and 62 deletions

View file

@@ -39,6 +39,10 @@ class PageInterstitialShown(Exception):
     pass


+class VideoExtractorError(Exception):
+    pass
+
+
 class ProxyError(Exception):
     pass
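
Note (not part of the diff): a minimal sketch of how the new exception class is intended to be used, based on the worker and yt-dlp changes further down in this merge. Extractor failures are surfaced as VideoExtractorError rather than being folded into ProxyError, so callers can log them without triggering proxy-level retry handling. The fetch_video_info wrapper is hypothetical, for illustration only.

    import brozzler

    def fetch_video_info(ydl, url):
        try:
            return ydl.extract_info(url)
        except brozzler.ProxyError:
            # proxy trouble still propagates so the page can be retried elsewhere
            raise
        except Exception as e:
            # any other extractor failure is reported as a video extraction error
            raise brozzler.VideoExtractorError(
                "yt-dlp hit error extracting info for %s" % url
            ) from e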

View file

@@ -238,7 +238,7 @@ def brozzle_page(argv=None):
         "--metrics_port",
         type=int,
         dest="metrics_port",
-        default=8888,
+        default=0,
         help="Port for brozzler's Prometheus scrape endpoint",
     )
     arg_parser.add_argument(
@@ -251,7 +251,7 @@ def brozzle_page(argv=None):
         "--env",
         dest="env",
         default=None,
-        help="env for Prometheus target registry",
+        help="deployment environment for this brozzler instance, e.g., prod or qa",
     )
     arg_parser.add_argument(
         "--screenshot-full-page", dest="screenshot_full_page", action="store_true"
@@ -298,7 +298,7 @@ def brozzle_page(argv=None):
         window_height=args.window_height,
         window_width=args.window_width,
         stealth=args.stealth,
-        metrics_port=int(args.metrics_port),
+        metrics_port=args.metrics_port,
         registry_url=args.registry_url,
         env=args.env,
     )
@@ -543,7 +543,7 @@ def brozzler_worker(argv=None):
         "--metrics_port",
         type=int,
         dest="metrics_port",
-        default=8888,
+        default=0,
         help="Port for brozzler's Prometheus scrape endpoint",
     )
     arg_parser.add_argument(
@@ -556,7 +556,7 @@ def brozzler_worker(argv=None):
         "--env",
         dest="env",
         default=None,
-        help="env for Prometheus target registry",
+        help="deployment environment for this brozzler instance, e.g., prod or qa",
     )

     add_common_options(arg_parser, argv)
@@ -614,7 +614,7 @@ def brozzler_worker(argv=None):
         skip_visit_hashtags=args.skip_visit_hashtags,
         skip_youtube_dl=args.skip_youtube_dl,
         stealth=args.stealth,
-        metrics_port=int(args.metrics_port),
+        metrics_port=args.metrics_port,
         registry_url=args.registry_url,
         env=args.env,
     )
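
Note (not part of the diff): a minimal argparse sketch, outside brozzler, showing the effect of the new default. With default=0 the Prometheus scrape endpoint stays off unless an operator passes --metrics_port explicitly, and because type=int already converts the value, the int() wrapper at the BrozzlerWorker call sites was redundant and is dropped above.

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--metrics_port",
        type=int,
        dest="metrics_port",
        default=0,
        help="Port for brozzler's Prometheus scrape endpoint",
    )

    args = parser.parse_args([])  # no flag given
    assert args.metrics_port == 0  # 0 means the scrape endpoint is not started

    args = parser.parse_args(["--metrics_port", "8888"])
    assert args.metrics_port == 8888  # opt in by choosing a port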

View file

@@ -22,10 +22,6 @@ brozzler_page_processing_duration_seconds = Histogram("brozzler_page_processing_
 brozzler_outlinks_found = Counter("brozzler_outlinks_found", "number of outlinks found by brozzler")
 brozzler_last_page_crawled_time = Gauge("brozzler_last_page_crawled_time", "time of last page visit, in seconds since UNIX epoch")
 brozzler_in_progress_pages = Gauge("brozzler_in_progress_pages", "number of pages currently processing with brozzler")
-brozzler_resources_requested = Counter("brozzler_resources_requested", "number of resources requested", labelnames=["resource_type"])
-brozzler_resources_fetched = Counter("brozzler_resources_fetched", "number of resources fetched", labelnames=["resource_type", "status_code"])
-brozzler_resources_size_total = Counter("brozzler_resources_size_total", "total size of resources fetched", labelnames=["resource_type"])
-brozzler_resources_fetch_time = Counter("brozzler_resources_fetch_time", "time spent fetching resources", labelnames=["resource_type"])
 brozzler_ydl_urls_checked = Counter("brozzler_ydl_urls_checked", "count of urls checked by brozzler yt-dlp")
 brozzler_ydl_extract_attempts = Counter("brozzler_ydl_extract_attempts", "count of extracts attempted by brozzler yt-dlp", labelnames=["youtube_host"])
 brozzler_ydl_extract_successes = Counter("brozzler_ydl_extract_successes", "count of extracts completed by brozzler yt-dlp", labelnames=["youtube_host"])

View file

@@ -72,7 +72,7 @@ class BrozzlerWorker:
         stealth=False,
         window_height=900,
         window_width=1400,
-        metrics_port=None,
+        metrics_port=0,
         registry_url=None,
         env=None,
     ):
@@ -111,8 +111,15 @@ class BrozzlerWorker:
         self._start_stop_lock = threading.Lock()
         self._shutdown = threading.Event()
-        # Setup metrics
-        metrics.register_prom_metrics(self._metrics_port, self._registry_url, self._env)
+        # set up metrics
+        if self._metrics_port > 0:
+            metrics.register_prom_metrics(
+                self._metrics_port, self._registry_url, self._env
+            )
+        else:
+            logging.warning(
+                "not starting prometheus scrape endpoint: metrics_port is undefined"
+            )

     def _choose_warcprox(self):
         warcproxes = self._service_registry.available_services("warcprox")
@@ -285,6 +292,11 @@ class BrozzlerWorker:
             raise
         except brozzler.ProxyError:
             raise
+        except brozzler.VideoExtractorError as e:
+            logging.error(
+                "error extracting video info: %s",
+                e,
+            )
         except Exception as e:
             if (
                 hasattr(e, "exc_info")

View file

@@ -118,6 +118,11 @@ def should_ytdlp(site, page, page_status, skip_av_seeds):
     return True


+def isyoutubehost(url):
+    # split 1 splits scheme from url, split 2 splits path from hostname, split 3 splits query string on hostname
+    return "youtube.com" in url.split("//")[-1].split("/")[0].split("?")[0]
+
+
 class ExtraHeaderAdder(urllib.request.BaseHandler):
     def __init__(self, extra_headers):
         self.extra_headers = extra_headers
@@ -231,7 +236,6 @@ def _build_youtube_dl(worker, destdir, site, page):
                 worker._proxy_for(site),
                 url,
             )
-            try:
             with open(info_dict["filepath"], "rb") as f:
                 # include content-length header to avoid chunked
                 # transfer, which warcprox currently rejects
@@ -245,6 +249,7 @@ def _build_youtube_dl(worker, destdir, site, page):
                     payload=f,
                     extra_headers=extra_headers,
                 )
+            # consulted by _remember_videos()
             ydl.pushed_videos.append(
                 {
@@ -254,8 +259,6 @@ def _build_youtube_dl(worker, destdir, site, page):
                     "content-length": size,
                 }
             )
-            except:
-                traceback.print_exc()

        def maybe_heartbeat_site_last_claimed(*args, **kwargs):
            # in case yt-dlp takes a long time, heartbeat site.last_claimed
@@ -286,14 +289,9 @@ def _build_youtube_dl(worker, destdir, site, page):
             worker.logger.info(
                 "[ydl_postprocess_hook] postprocessor: {}".format(d["postprocessor"])
             )
-            youtube_host = (
-                "youtube.com"
-                in d["info_dict"]["webpage_url"]
-                .split("//")[-1]
-                .split("/")[0]
-                .split("?")[0]
-            )
-            metrics.brozzler_ydl_download_successes.labels(youtube_host).inc(1)
+            is_youtube_host = isyoutubehost(d["info_dict"]["webpage_url"])
+            metrics.brozzler_ydl_download_successes.labels(is_youtube_host).inc(1)
             if worker._using_warcprox(site):
                 _YoutubeDL._push_video_to_warcprox(
                     _YoutubeDL, site, d["info_dict"], d["postprocessor"]
@@ -332,15 +330,14 @@ def _build_youtube_dl(worker, destdir, site, page):
     }

     ytdlp_url = page.redirect_url if page.redirect_url else page.url
-    youtube_host = (
-        "youtube.com" in ytdlp_url.split("//")[-1].split("/")[0].split("?")[0]
-    )
-    if youtube_host and YTDLP_PROXY:
+    is_youtube_host = isyoutubehost(ytdlp_url)
+    if is_youtube_host and YTDLP_PROXY:
         ydl_opts["proxy"] = YTDLP_PROXY
-        ytdlp_proxy_for_print = (
+        # don't log proxy value secrets
+        ytdlp_proxy_for_logs = (
             YTDLP_PROXY.split("@")[1] if "@" in YTDLP_PROXY else "@@@"
         )
-        logging.info("using yt-dlp proxy ... %s", ytdlp_proxy_for_print)
+        logging.info("using yt-dlp proxy ... %s", ytdlp_proxy_for_logs)

     # skip warcprox proxying yt-dlp v.2023.07.06: youtube extractor using ranges
     # if worker._proxy_for(site):
@@ -351,7 +348,7 @@ def _build_youtube_dl(worker, destdir, site, page):
     ydl._opener.add_handler(ExtraHeaderAdder(site.extra_headers(page)))
     ydl.pushed_videos = []
     ydl.url = ytdlp_url
-    ydl.youtube_host = youtube_host
+    ydl.is_youtube_host = is_youtube_host

     return ydl
@@ -379,12 +376,12 @@ def _try_youtube_dl(worker, ydl, site, page):
     while attempt < MAX_YTDLP_ATTEMPTS:
         try:
             logging.info("trying yt-dlp on %s", ydl.url)
-            # should_download_vid = not ydl.youtube_host
+            # should_download_vid = not ydl.is_youtube_host
             # then
             # ydl.extract_info(str(urlcanon.whatwg(ydl.url)), download=should_download_vid)
-            # if ydl.youtube_host and ie_result:
+            # if ydl.is_youtube_host and ie_result:
             # download_url = ie_result.get("url")
-            metrics.brozzler_ydl_extract_attempts.labels(ydl.youtube_host).inc(1)
+            metrics.brozzler_ydl_extract_attempts.labels(ydl.is_youtube_host).inc(1)
             with brozzler.thread_accept_exceptions():
                 # we do whatwg canonicalization here to avoid "<urlopen error
                 # no host given>" resulting in ProxyError
@@ -393,7 +390,7 @@ def _try_youtube_dl(worker, ydl, site, page):
                 ie_result = ydl.sanitize_info(
                     ydl.extract_info(str(urlcanon.whatwg(ydl.url)))
                 )
-            metrics.brozzler_ydl_extract_successes.labels(ydl.youtube_host).inc(1)
+            metrics.brozzler_ydl_extract_successes.labels(ydl.is_youtube_host).inc(1)
             break
         except brozzler.ShutdownRequested as e:
             raise
@@ -419,18 +416,22 @@ def _try_youtube_dl(worker, ydl, site, page):
                 logging.warning(
                     "Failed after %s attempts. Error: %s", MAX_YTDLP_ATTEMPTS, e
                 )
-                raise brozzler.ProxyError(
-                    "yt-dlp hit possible external proxy error from %s" % ydl.url
+                raise brozzler.VideoExtractorError(
+                    "yt-dlp hit error extracting info for %s" % ydl.url
                 )
             else:
+                retry_wait = min(60, YTDLP_WAIT * (1.5 ** (attempt - 1)))
                 logging.info(
                     "Attempt %s failed. Retrying in %s seconds...",
                     attempt,
-                    YTDLP_WAIT,
+                    retry_wait,
                 )
-                time.sleep(YTDLP_WAIT)
+                time.sleep(retry_wait)
     else:
-        raise brozzler.ProxyError("Proxy attempt(s) failed for unknown reason(s)")
+        raise brozzler.VideoExtractorError(
+            "yt-dlp hit unknown error extracting info for %s" % ydl.url
+        )

     logging.info("ytdlp completed successfully")
     _remember_videos(page, ydl.pushed_videos)
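
Note (not part of the diff): a minimal sketch of the retry backoff introduced above. The real MAX_YTDLP_ATTEMPTS and YTDLP_WAIT constants live in brozzler's ydl module; the values below are assumed, for illustration only.

    MAX_YTDLP_ATTEMPTS = 4  # assumed value
    YTDLP_WAIT = 10  # assumed value, in seconds

    for attempt in range(1, MAX_YTDLP_ATTEMPTS):
        # same formula as the diff: exponential backoff, capped at 60 seconds
        retry_wait = min(60, YTDLP_WAIT * (1.5 ** (attempt - 1)))
        print("attempt %s failed, retrying in %.1f seconds" % (attempt, retry_wait))
    # with these values: 10.0s, 15.0s, 22.5s; later attempts would cap at 60s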

View file

@@ -34,7 +34,7 @@ def find_package_data(package):
 setuptools.setup(
     name="brozzler",
-    version="1.5.55a3",
+    version="1.5.55",
     description="Distributed web crawling with browsers",
     url="https://github.com/internetarchive/brozzler",
     author="Noah Levitt",