Mirror of https://github.com/internetarchive/brozzler.git (synced 2025-02-25 00:59:52 -05:00)

commit 44b5317b93
Merge branch 'metrics_plus_proxy_retries' into qa
@@ -236,15 +236,16 @@ def brozzle_page(argv=None):
     )
     arg_parser.add_argument(
         "--metrics_port",
+        type=int,
         dest="metrics_port",
         default=8888,
-        help="Prometheus metrics port",
+        help="Port for brozzler's Prometheus scrape endpoint",
     )
     arg_parser.add_argument(
         "--registry_url",
         dest="registry_url",
         default=None,
-        help="Prometheus scrape target registry URL",
+        help="http-sd-registry url, for Prometheus metrics discovery",
     )
     arg_parser.add_argument(
         "--env",
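
For context, a minimal sketch of exercising the two new flags through the entry point's argv parameter (assuming the usual convention that argv[0] is the program name; the registry and page URLs are placeholders):

    from brozzler.cli import brozzle_page

    brozzle_page(argv=[
        "brozzle-page",
        "--metrics_port", "8888",
        "--registry_url", "http://registry.example:8000",
        "https://example.com/",
    ])
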
@@ -297,7 +298,7 @@ def brozzle_page(argv=None):
         window_height=args.window_height,
         window_width=args.window_width,
         stealth=args.stealth,
-        metrics_port=args.metrics_port,
+        metrics_port=int(args.metrics_port),
         registry_url=args.registry_url,
         env=args.env,
     )
@@ -540,15 +541,16 @@ def brozzler_worker(argv=None):
     )
     arg_parser.add_argument(
         "--metrics_port",
-        dest=metrics_port,
+        type=int,
+        dest="metrics_port",
         default=8888,
-        help="Prometheus metrics port",
+        help="Port for brozzler's Prometheus scrape endpoint",
     )
     arg_parser.add_argument(
         "--registry_url",
         dest="registry_url",
         default=None,
-        help="Prometheus scrape target registry URL",
+        help="http-sd-registry url, for Prometheus metrics discovery",
    )
     arg_parser.add_argument(
         "--env",
@@ -612,7 +614,7 @@ def brozzler_worker(argv=None):
         skip_visit_hashtags=args.skip_visit_hashtags,
         skip_youtube_dl=args.skip_youtube_dl,
         stealth=args.stealth,
-        metrics_port=args.metrics_port,
+        metrics_port=int(args.metrics_port),
         registry_url=args.registry_url,
         env=args.env,
     )
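
The port collected here is presumably what the metrics side listens on; prometheus_client's start_http_server (imported in the metrics module below) is the standard way to expose a scrape endpoint. A minimal sketch, not brozzler's exact wiring:

    from prometheus_client import start_http_server

    # serves /metrics in a background daemon thread
    start_http_server(8888)
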

@@ -10,6 +10,7 @@ try:
     )
     from http_sd_registry.config import ClientConfig
 except ImportError:
+    # for users without access to http_sd_registry
     http_sd_registry = None


@@ -19,15 +20,15 @@ from prometheus_client import Counter, Gauge, Histogram, start_http_server
 brozzler_pages_crawled = Counter("brozzler_pages_crawled", "number of pages visited by brozzler")
 brozzler_page_processing_duration_seconds = Histogram("brozzler_page_processing_duration_seconds", "time spent processing a page in brozzler")
 brozzler_outlinks_found = Counter("brozzler_outlinks_found", "number of outlinks found by brozzler")
-brozzler_last_page_crawled_time = Gauge("brozzler_last_page_crawled_time", "time of last page visit")
+brozzler_last_page_crawled_time = Gauge("brozzler_last_page_crawled_time", "time of last page visit, in seconds since UNIX epoch")
 brozzler_in_progress_pages = Gauge("brozzler_in_progress_pages", "number of pages currently processing with brozzler")
 brozzler_resources_requested = Counter("brozzler_resources_requested", "number of resources requested", labelnames=["resource_type"])
 brozzler_resources_fetched = Counter("brozzler_resources_fetched", "number of resources fetched", labelnames=["resource_type", "status_code"])
 brozzler_resources_size_total = Counter("brozzler_resources_size_total", "total size of resources fetched", labelnames=["resource_type"])
 brozzler_resources_fetch_time = Counter("brozzler_resources_fetch_time", "time spent fetching resources", labelnames=["resource_type"])
 brozzler_ydl_urls_checked = Counter("brozzler_ydl_urls_checked", "count of urls checked by brozzler yt-dlp")
-brozzler_ydl_download_attempts = Counter("brozzler_ydl_download_attempts", "count of download attempted by brozzler yt-dlp", labelnames=["host"])
-brozzler_ydl_download_successes = Counter("brozzler_ydl_download_successes", "count of downloads completed by brozzler yt-dlp", labelnames=["host"])
+brozzler_ydl_extract_attempts = Counter("brozzler_ydl_download_attempts", "count of download attempted by brozzler yt-dlp", labelnames=["youtube_host"])
+brozzler_ydl_extract_successes = Counter("brozzler_ydl_download_successes", "count of downloads completed by brozzler yt-dlp", labelnames=["youtube_host"])
 # fmt: on


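
Labeled counters like the ones above keep one time series per combination of label values. A small self-contained example (the metric name here is illustrative, not from the commit):

    from prometheus_client import Counter

    example_fetched = Counter(
        "example_resources_fetched",
        "number of resources fetched",
        labelnames=["resource_type", "status_code"],
    )
    example_fetched.labels("image", "200").inc()   # one series for (image, 200)
    example_fetched.labels("script", "404").inc()  # a separate series for (script, 404)
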
@@ -42,12 +43,19 @@ def register_prom_metrics(
     if registry_url is None:
         return

+    if env == "qa":
+        env_for_prom = Env.qa
+    elif env == "prod":
+        env_for_prom = Env.prod
+    else:
+        env_for_prom = Env.qa
+
     config = ClientConfig(server_url_base=registry_url)
     client = Client(config)
     target = format_self_target(scrape_port=metrics_port)
     registration = Registration(
         target=target,
-        env=env,
+        env=env_for_prom,
         scheme=Scheme.http,
     )
     client.keep_registered_threaded(registration)
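
The chain above maps the --env string onto the registry's Env enum, falling back to Env.qa for anything unrecognized. The same mapping could be written as a dict lookup (a sketch, using the Env enum the module imports from http_sd_registry):

    # equivalent to the if/elif/else above: unknown env values fall back to qa
    env_for_prom = {"qa": Env.qa, "prod": Env.prod}.get(env, Env.qa)
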

@@ -39,7 +39,7 @@ import time
 thread_local = threading.local()

 PROXYRACK_PROXY = "@@@"
-MAX_YTDLP_ATTEMPTS = 4
+MAX_YTDLP_ATTEMPTS = 3
 YTDLP_WAIT = 10


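These two constants drive the yt-dlp retry logic in this module: up to MAX_YTDLP_ATTEMPTS tries with a YTDLP_WAIT-second pause between them. Schematically (a sketch of the pattern, not the exact function body; attempt_extraction and RetryableError are hypothetical stand-ins):

    import time

    attempt = 0
    while attempt < MAX_YTDLP_ATTEMPTS:  # now 3 attempts rather than 4
        try:
            attempt_extraction()  # stand-in for the yt-dlp call
            break
        except RetryableError:  # stand-in for the proxy-related errors
            attempt += 1
            if attempt == MAX_YTDLP_ATTEMPTS:
                raise
            time.sleep(YTDLP_WAIT)  # pause 10 seconds before retrying
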
@@ -318,6 +318,9 @@ def _build_youtube_dl(worker, destdir, site, page):
         "logger": logging.getLogger("yt_dlp"),
         "verbose": False,
         "quiet": False,
+        # does this make sense when we're generally downloading one at a time?
+        "sleep_interval": 25,
+        "max_sleep_interval": 90,
         "proxy": PROXYRACK_PROXY,
     }

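sleep_interval and max_sleep_interval are standard yt-dlp options: before each download, yt-dlp sleeps a random number of seconds drawn from that range. A standalone sketch with the same values (the URL is a placeholder):

    import yt_dlp

    ydl_opts = {
        "sleep_interval": 25,      # minimum pre-download sleep, seconds
        "max_sleep_interval": 90,  # maximum pre-download sleep, seconds
    }
    with yt_dlp.YoutubeDL(ydl_opts) as ydl:
        info = ydl.sanitize_info(
            ydl.extract_info("https://example.com/video", download=False)
        )
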
@@ -352,12 +355,17 @@ def _remember_videos(page, pushed_videos=None):

 def _try_youtube_dl(worker, ydl, site, page):
     ytdlp_url = page.redirect_url if page.redirect_url else page.url
-    ytdlp_host = ytdlp_url.split("//")[-1].split("/")[0].split("?")[0]
+    youtube_host = "youtube.com" in ytdlp_url.split("//")[-1].split("/")[0].split("?")[0]
     attempt = 0
     while attempt < MAX_YTDLP_ATTEMPTS:
         try:
             logging.info("trying yt-dlp on %s", ytdlp_url)
-            metrics.brozzler_ydl_download_attempts.labels(ytdlp_host).inc(1)
+            # should_download_vid = not youtube_host
+            # then
+            # ydl.extract_info(str(urlcanon.whatwg(ytdlp_url)), download=should_download_vid)
+            # if youtube_host and ie_result:
+            #     download_url = ie_result.get("url")
+            metrics.brozzler_ydl_extract_attempts.labels(youtube_host).inc(1)
             with brozzler.thread_accept_exceptions():
                 # we do whatwg canonicalization here to avoid "<urlopen error
                 # no host given>" resulting in ProxyError
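
The split chain strips the scheme, path, and query string to leave just the host, and youtube_host is now a boolean rather than a hostname. For example:

    url = "https://www.youtube.com/watch?v=abc123"          # illustrative URL
    host = url.split("//")[-1].split("/")[0].split("?")[0]  # -> "www.youtube.com"
    youtube_host = "youtube.com" in host                    # -> True

Since prometheus_client coerces label values to strings, the youtube_host label on the counters above records "True" or "False".
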
@@ -366,6 +374,7 @@ def _try_youtube_dl(worker, ydl, site, page):
                 ie_result = ydl.sanitize_info(
                     ydl.extract_info(str(urlcanon.whatwg(ytdlp_url)))
                 )
+            metrics.brozzler_ydl_extract_successes.labels(youtube_host).inc(1)
             break
         except brozzler.ShutdownRequested as e:
             raise
@@ -414,48 +423,14 @@ def _try_youtube_dl(worker, ydl, site, page):
             "with yt-dlp json for %s",
             ytdlp_url,
         )
-        attempt = 0
-        while attempt < MAX_YTDLP_ATTEMPTS:
-            try:
-                worker._warcprox_write_record(
-                    warcprox_address=worker._proxy_for(site),
-                    url="youtube-dl:%s" % str(urlcanon.semantic(ytdlp_url)),
-                    warc_type="metadata",
-                    content_type="application/vnd.youtube-dl_formats+json;charset=utf-8",
-                    payload=info_json.encode("utf-8"),
-                    extra_headers=site.extra_headers(page),
-                )
-                break
-            except Exception as e:
-                # connection problem when using a proxy == proxy error
-                if (
-                    hasattr(e, "exc_info")
-                    and e.exc_info[0] == urllib.error.URLError
-                    and worker._proxy_for(site)
-                ):
-                    attempt += 1
-                    if attempt == MAX_YTDLP_ATTEMPTS:
-                        logging.warning(
-                            "Failed after %s attempts. Error: %s", MAX_YTDLP_ATTEMPTS, e
-                        )
-                        raise brozzler.ProxyError(
-                            "yt-dlp hit proxy error storing media from %s with "
-                            % ytdlp_url
-                        )
-                    else:
-                        logging.info(
-                            "Attempt %s failed. Retrying in %s seconds...",
-                            attempt,
-                            YTDLP_WAIT,
-                        )
-                        time.sleep(YTDLP_WAIT)
-                else:
-                    raise
-        else:
-            raise brozzler.ProxyError(
-                "Proxy attempt(s) storing media failed for unknown reason(s)"
-            )
+        worker._warcprox_write_record(
+            warcprox_address=worker._proxy_for(site),
+            url="youtube-dl:%s" % str(urlcanon.semantic(ytdlp_url)),
+            warc_type="metadata",
+            content_type="application/vnd.youtube-dl_formats+json;charset=utf-8",
+            payload=info_json.encode("utf-8"),
+            extra_headers=site.extra_headers(page),
+        )
     return ie_result