From e30d70b6d49472438fefb012a7b1c0ad63c4f6b6 Mon Sep 17 00:00:00 2001 From: Omar Roth Date: Fri, 28 Jun 2019 21:17:56 -0500 Subject: [PATCH] Refactor proxy_list into global --- src/invidious.cr | 42 +++++++++++++++----------------- src/invidious/comments.cr | 6 ++--- src/invidious/helpers/helpers.cr | 6 ++--- src/invidious/helpers/utils.cr | 21 ++++------------ src/invidious/search.cr | 4 +-- src/invidious/trending.cr | 2 +- src/invidious/videos.cr | 16 ++++++------ 7 files changed, 42 insertions(+), 55 deletions(-) diff --git a/src/invidious.cr b/src/invidious.cr index 6f08ec66..ad985924 100644 --- a/src/invidious.cr +++ b/src/invidious.cr @@ -210,8 +210,6 @@ spawn do end end -proxies = PROXY_LIST - before_all do |env| host_url = make_host_url(config, Kemal.config) env.response.headers["X-XSS-Protection"] = "1; mode=block" @@ -383,7 +381,7 @@ get "/watch" do |env| env.params.query.delete_all("listen") begin - video = get_video(id, PG_DB, proxies, region: params.region) + video = get_video(id, PG_DB, region: params.region) rescue ex : VideoRedirect next env.redirect "/watch?v=#{ex.message}" rescue ex @@ -419,7 +417,7 @@ get "/watch" do |env| if source == "youtube" begin - comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"] + comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"] rescue ex if preferences.comments[1] == "reddit" comments, reddit_thread = fetch_reddit_comments(id) @@ -438,12 +436,12 @@ get "/watch" do |env| comment_html = replace_links(comment_html) rescue ex if preferences.comments[1] == "youtube" - comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"] + comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"] end end end else - comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"] + comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"] end comment_html ||= "" @@ -606,7 +604,7 @@ get "/embed/:id" do |env| subscriptions ||= [] of String begin - video = get_video(id, PG_DB, proxies, region: params.region) + video = get_video(id, PG_DB, region: params.region) rescue ex : VideoRedirect next env.redirect "/embed/#{ex.message}" rescue ex @@ -859,7 +857,7 @@ get "/search" do |env| next templated "error" end - count, videos = search(search_query, page, search_params, proxies, region).as(Tuple) + count, videos = search(search_query, page, search_params, region).as(Tuple) end templated "search" @@ -2411,7 +2409,7 @@ get "/feed/trending" do |env| region ||= "US" begin - trending, plid = fetch_trending(trending_type, proxies, region, locale) + trending, plid = fetch_trending(trending_type, region, locale) rescue ex error_message = "#{ex.message}" env.response.status_code = 500 @@ -2725,7 +2723,7 @@ post "/feed/webhook/:token" do |env| published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content) updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content) - video = get_video(id, PG_DB, proxies, force_refresh: true) + video = get_video(id, PG_DB, force_refresh: true) # Deliver notifications to `/api/v1/auth/notifications` payload = { @@ -3007,7 +3005,7 @@ get "/api/v1/storyboards/:id" do |env| 
client = make_client(YT_URL) begin - video = get_video(id, PG_DB, proxies, region: region) + video = get_video(id, PG_DB, region: region) rescue ex : VideoRedirect next env.redirect "/api/v1/storyboards/#{ex.message}" rescue ex @@ -3092,7 +3090,7 @@ get "/api/v1/captions/:id" do |env| client = make_client(YT_URL) begin - video = get_video(id, PG_DB, proxies, region: region) + video = get_video(id, PG_DB, region: region) rescue ex : VideoRedirect next env.redirect "/api/v1/captions/#{ex.message}" rescue ex @@ -3223,7 +3221,7 @@ get "/api/v1/comments/:id" do |env| sort_by ||= "top" begin - comments = fetch_youtube_comments(id, PG_DB, continuation, proxies, format, locale, thin_mode, region, sort_by: sort_by) + comments = fetch_youtube_comments(id, PG_DB, continuation, format, locale, thin_mode, region, sort_by: sort_by) rescue ex error_message = {"error" => ex.message}.to_json env.response.status_code = 500 @@ -3433,7 +3431,7 @@ get "/api/v1/videos/:id" do |env| region = env.params.query["region"]? begin - video = get_video(id, PG_DB, proxies, region: region) + video = get_video(id, PG_DB, region: region) rescue ex : VideoRedirect next env.redirect "/api/v1/videos/#{ex.message}" rescue ex @@ -3454,7 +3452,7 @@ get "/api/v1/trending" do |env| trending_type = env.params.query["type"]? begin - trending, plid = fetch_trending(trending_type, proxies, region, locale) + trending, plid = fetch_trending(trending_type, region, locale) rescue ex error_message = {"error" => ex.message}.to_json env.response.status_code = 500 @@ -3817,7 +3815,7 @@ get "/api/v1/search" do |env| end end - count, search_results = search(query, page, search_params, proxies, region).as(Tuple) + count, search_results = search(query, page, search_params, region).as(Tuple) JSON.build do |json| json.array do search_results.each do |item| @@ -3996,7 +3994,7 @@ get "/api/v1/auth/notifications" do |env| topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000) topics ||= [] of String - create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, connection_channel) + create_notification_stream(env, config, Kemal.config, decrypt_function, topics, connection_channel) end post "/api/v1/auth/notifications" do |env| @@ -4005,7 +4003,7 @@ post "/api/v1/auth/notifications" do |env| topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000) topics ||= [] of String - create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, connection_channel) + create_notification_stream(env, config, Kemal.config, decrypt_function, topics, connection_channel) end get "/api/v1/auth/preferences" do |env| @@ -4250,7 +4248,7 @@ get "/api/manifest/dash/id/:id" do |env| client = make_client(YT_URL) begin - video = get_video(id, PG_DB, proxies, region: region) + video = get_video(id, PG_DB, region: region) rescue ex : VideoRedirect url = "/api/manifest/dash/id/#{ex.message}" if env.params.query @@ -4440,7 +4438,7 @@ get "/latest_version" do |env| next end - video = get_video(id, PG_DB, proxies, region: region) + video = get_video(id, PG_DB, region: region) fmt_stream = video.fmt_stream(decrypt_function) adaptive_fmts = video.adaptive_fmts(decrypt_function) @@ -4556,7 +4554,7 @@ get "/videoplayback" do |env| response = HTTP::Client::Response.new(403) 5.times do begin - client = make_client(URI.parse(host), proxies, region) + client = make_client(URI.parse(host), region) response = client.head(url, headers) break rescue Socket::Addrinfo::Error @@ -4595,7 +4593,7 @@ get "/videoplayback" 
do |env| (range_begin...range_end).each_slice(HTTP_CHUNK_SIZE) do |slice| headers["Range"] = "bytes=#{slice[0]}-#{slice[-1]}" begin - client = make_client(URI.parse(host), proxies, region) + client = make_client(URI.parse(host), region) client.get(url, headers) do |response| content_range = response.headers["Content-Range"].lchop("bytes ") content_size = content_range.split("/")[-1].to_i diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr index 7f593760..d20076d5 100644 --- a/src/invidious/comments.cr +++ b/src/invidious/comments.cr @@ -56,8 +56,8 @@ class RedditListing }) end -def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_mode, region, sort_by = "top") - video = get_video(id, db, proxies, region: region) +def fetch_youtube_comments(id, db, continuation, format, locale, thin_mode, region, sort_by = "top") + video = get_video(id, db, region: region) session_token = video.info["session_token"]? ctoken = produce_comment_continuation(id, cursor: "", sort_by: sort_by) @@ -75,7 +75,7 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m session_token: session_token, } - client = make_client(YT_URL, proxies, video.info["region"]?) + client = make_client(YT_URL, video.info["region"]?) headers = HTTP::Headers.new headers["content-type"] = "application/x-www-form-urlencoded" diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr index 4b0c1fde..197946f4 100644 --- a/src/invidious/helpers/helpers.cr +++ b/src/invidious/helpers/helpers.cr @@ -664,7 +664,7 @@ def copy_in_chunks(input, output, chunk_size = 8192) end end -def create_notification_stream(env, proxies, config, kemal_config, decrypt_function, topics, connection_channel) +def create_notification_stream(env, config, kemal_config, decrypt_function, topics, connection_channel) connection = Channel(PQ::Notification).new(8) connection_channel.send({true, connection}) @@ -682,7 +682,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct published = Time.utc - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3]) video_id = TEST_IDS[rand(TEST_IDS.size)] - video = get_video(video_id, PG_DB, proxies) + video = get_video(video_id, PG_DB) video.published = published response = JSON.parse(video.to_json(locale, config, kemal_config, decrypt_function)) @@ -758,7 +758,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct next end - video = get_video(video_id, PG_DB, proxies) + video = get_video(video_id, PG_DB) video.published = Time.unix(published) response = JSON.parse(video.to_json(locale, config, Kemal.config, decrypt_function)) diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr index 0f853f31..472e2600 100644 --- a/src/invidious/helpers/utils.cr +++ b/src/invidious/helpers/utils.cr @@ -18,24 +18,13 @@ def elapsed_text(elapsed) "#{(millis * 1000).round(2)}µs" end -def make_client(url : URI, proxies = {} of String => Array({ip: String, port: Int32}), region = nil) - context = nil - - if url.scheme == "https" - context = OpenSSL::SSL::Context::Client.new - context.add_options( - OpenSSL::SSL::Options::ALL | - OpenSSL::SSL::Options::NO_SSL_V2 | - OpenSSL::SSL::Options::NO_SSL_V3 - ) - end - - client = HTTPClient.new(url, context) - client.read_timeout = 10.seconds - client.connect_timeout = 10.seconds +def make_client(url : URI, region = nil) + client = HTTPClient.new(url) + client.read_timeout = 15.seconds + client.connect_timeout = 15.seconds if 
region - proxies[region]?.try &.sample(40).each do |proxy| + PROXY_LIST[region]?.try &.sample(40).each do |proxy| begin proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port]) client.set_proxy(proxy) diff --git a/src/invidious/search.cr b/src/invidious/search.cr index c69f96cf..ebeb2236 100644 --- a/src/invidious/search.cr +++ b/src/invidious/search.cr @@ -256,8 +256,8 @@ def channel_search(query, page, channel) return count, items end -def search(query, page = 1, search_params = produce_search_params(content_type: "all"), proxies = nil, region = nil) - client = make_client(YT_URL, proxies, region) +def search(query, page = 1, search_params = produce_search_params(content_type: "all"), region = nil) + client = make_client(YT_URL, region) if query.empty? return {0, [] of SearchItem} end diff --git a/src/invidious/trending.cr b/src/invidious/trending.cr index 8e55f207..5f9d7920 100644 --- a/src/invidious/trending.cr +++ b/src/invidious/trending.cr @@ -1,4 +1,4 @@ -def fetch_trending(trending_type, proxies, region, locale) +def fetch_trending(trending_type, region, locale) client = make_client(YT_URL) headers = HTTP::Headers.new headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36" diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr index f1b8e3f8..ff708df7 100644 --- a/src/invidious/videos.cr +++ b/src/invidious/videos.cr @@ -869,7 +869,7 @@ end class VideoRedirect < Exception end -def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}), refresh = true, region = nil, force_refresh = false) +def get_video(id, db, refresh = true, region = nil, force_refresh = false) if (video = db.query_one?("SELECT * FROM videos WHERE id = $1", id, as: Video)) && !region # If record was last updated over 10 minutes ago, or video has since premiered, # refresh (expire param in response lasts for 6 hours) @@ -878,7 +878,7 @@ def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}) (video.premiere_timestamp && video.premiere_timestamp.as(Time) < Time.utc)) || force_refresh begin - video = fetch_video(id, proxies, region) + video = fetch_video(id, region) video_array = video.to_a args = arg_array(video_array[1..-1], 2) @@ -893,7 +893,7 @@ def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}) end end else - video = fetch_video(id, proxies, region) + video = fetch_video(id, region) video_array = video.to_a args = arg_array(video_array) @@ -1097,8 +1097,8 @@ def extract_player_config(body, html) return params end -def fetch_video(id, proxies, region) - client = make_client(YT_URL, proxies, region) +def fetch_video(id, region) + client = make_client(YT_URL, region) response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999") if md = response.headers["location"]?.try &.match(/v=(?[a-zA-Z0-9_-]{11})/) @@ -1113,9 +1113,9 @@ def fetch_video(id, proxies, region) if info["reason"]? && info["reason"].includes? 
"your country" bypass_channel = Channel({XML::Node, HTTP::Params} | Nil).new - proxies.each do |proxy_region, list| + PROXY_LIST.each do |proxy_region, list| spawn do - client = make_client(YT_URL, proxies, proxy_region) + client = make_client(YT_URL, proxy_region) proxy_response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999") proxy_html = XML.parse_html(proxy_response.body) @@ -1131,7 +1131,7 @@ def fetch_video(id, proxies, region) end end - proxies.size.times do + PROXY_LIST.size.times do response = bypass_channel.receive if response html, info = response