Merge branch 'master' into api-only
commit 564fba87d2
https://github.com/iv-org/invidious.git
shard.yml
@@ -16,7 +16,9 @@ dependencies:
     commit: afd17fc
   pg:
     github: will/crystal-pg
+  sqlite3:
+    github: crystal-lang/crystal-sqlite3
 
-crystal: 0.26.1
+crystal: 0.27.0
 
 license: AGPLv3
src/invidious.cr
@@ -16,10 +16,12 @@
 
 require "crypto/bcrypt/password"
 require "detect_language"
+require "digest/md5"
 require "kemal"
 require "openssl/hmac"
 require "option_parser"
 require "pg"
+require "sqlite3"
 require "xml"
 require "yaml"
 require "zip"
@@ -524,14 +526,19 @@ get "/api/v1/videos/:id" do |env|
 end
 
 get "/api/v1/trending" do |env|
-  client = make_client(YT_URL)
-  trending = client.get("/feed/trending?disable_polymer=1").body
+  region = env.params.query["region"]?
+  trending_type = env.params.query["type"]?
+
+  begin
+    trending = fetch_trending(trending_type, proxies, region)
+  rescue ex
+    error_message = {"error" => ex.message}.to_json
+    halt env, status_code: 500, response: error_message
+  end
 
-  trending = XML.parse_html(trending)
   videos = JSON.build do |json|
     json.array do
-      nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
-      extract_videos(nodeset).each do |video|
+      trending.each do |video|
         json.object do
           json.field "title", video.title
           json.field "videoId", video.id

@@ -550,6 +557,9 @@ get "/api/v1/trending" do |env|
           json.field "publishedText", "#{recode_date(video.published)} ago"
          json.field "description", video.description
           json.field "descriptionHtml", video.description_html
+          json.field "liveNow", video.live_now
+          json.field "paid", video.paid
+          json.field "premium", video.premium
         end
       end
     end
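With this change, /api/v1/trending accepts optional region and type query parameters and returns a JSON array of video objects (title, videoId, publishedText, liveNow, paid, premium, and so on). A minimal sketch of calling the endpoint from Crystal, assuming an instance listening on localhost:3000 (the host and port are illustrative, not part of the diff):

require "http/client"
require "json"

# Hypothetical local instance; point this at a real deployment.
client = HTTP::Client.new("localhost", 3000)

# "type" picks a trending tab (e.g. music, gaming); "region" is a country code.
response = client.get("/api/v1/trending?type=music&region=US")

if response.status_code == 200
  JSON.parse(response.body).as_a.each do |video|
    puts "#{video["title"]} (#{video["videoId"]})"
  end
end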
@@ -1367,45 +1377,24 @@ get "/videoplayback" do |env|
   host = "https://r#{fvip}---#{mn}.googlevideo.com"
   url = "/videoplayback?#{query_params.to_s}"
 
-  if query_params["region"]?
-    client = make_client(URI.parse(host))
-    response = HTTP::Client::Response.new(status_code: 403)
-
-    if !proxies[query_params["region"]]?
-      halt env, status_code: 403
-    end
-
-    proxies[query_params["region"]].each do |proxy|
-      begin
-        client = HTTPClient.new(URI.parse(host))
-        client.read_timeout = 10.seconds
-        client.connect_timeout = 10.seconds
-
-        proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
-        client.set_proxy(proxy)
-
-        response = client.head(url)
-        if response.status_code == 200
-          # For whatever reason the proxy needs to be set again
-          client.set_proxy(proxy)
-          break
-        end
-      rescue ex
-      end
-    end
-  else
-    client = make_client(URI.parse(host))
-    response = client.head(url)
-  end
-
-  if response.status_code != 200
-    halt env, status_code: 403
-  end
+  region = query_params["region"]?
+  client = make_client(URI.parse(host), proxies, region)
+  response = client.head(url)
 
   if response.headers["Location"]?
     url = URI.parse(response.headers["Location"])
     env.response.headers["Access-Control-Allow-Origin"] = "*"
-    next env.redirect url.full_path
+
+    url = url.full_path
+    if region
+      url += "&region=#{region}"
+    end
+
+    next env.redirect url
   end
 
+  if response.status_code >= 400
+    halt env, status_code: 403
+  end
+
   headers = env.request.headers
@@ -1414,6 +1403,7 @@ get "/videoplayback" do |env|
   headers.delete("User-Agent")
   headers.delete("Referer")
 
+  client = make_client(URI.parse(host), proxies, region)
   client.get(url, headers) do |response|
     env.response.status_code = response.status_code
 
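The net effect of these two hunks: the hand-rolled per-route proxy loop is gone, and /videoplayback delegates region handling to make_client, whose extended signature appears in the helpers hunk further down. A sketch of the new call shape (the host and proxy entry here are illustrative, not values from the diff):

# make_client(url, proxies, region) selects and sets a proxy for the region internally.
proxies = {"GB" => [{ip: "203.0.113.7", port: 8080}]} # hypothetical proxy pool
client = make_client(URI.parse("https://r1---example.googlevideo.com"), proxies, "GB")
response = client.head("/videoplayback?expire=...") # query string elided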
src/invidious/comments.cr
@@ -70,34 +70,18 @@ def fetch_youtube_comments(id, continuation, proxies, format)
   if body.match(/<meta itemprop="regionsAllowed" content="">/)
     bypass_channel = Channel({String, HTTPClient, HTTP::Headers} | Nil).new
 
-    proxies.each do |region, list|
+    proxies.each do |proxy_region, list|
       spawn do
-        proxy_html = %(<meta itemprop="regionsAllowed" content="">)
+        proxy_client = make_client(YT_URL, proxies, proxy_region)
 
-        list.each do |proxy|
-          begin
-            proxy_client = HTTPClient.new(YT_URL)
-            proxy_client.read_timeout = 10.seconds
-            proxy_client.connect_timeout = 10.seconds
+        response = proxy_client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
+        proxy_headers = HTTP::Headers.new
+        proxy_headers["Cookie"] = response.cookies.add_request_headers(headers)["cookie"]
+        proxy_html = response.body
 
-            proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
-            proxy_client.set_proxy(proxy)
-
-            response = proxy_client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
-            proxy_headers = HTTP::Headers.new
-            proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
-            proxy_html = response.body
-
-            if !proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
-              bypass_channel.send({proxy_html, proxy_client, proxy_headers})
-              break
-            end
-          rescue ex
-          end
-        end
-
-        # If none of the proxies we tried returned a valid response
-        if proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
+        if !proxy_html.match(/<meta itemprop="regionsAllowed" content="">/)
+          bypass_channel.send({proxy_html, proxy_client, proxy_headers})
+        else
           bypass_channel.send(nil)
         end
       end
@@ -106,12 +90,12 @@ def fetch_youtube_comments(id, continuation, proxies, format)
     proxies.size.times do
       response = bypass_channel.receive
       if response
-        session_token = response[0].match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
-        itct = response[0].match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
-        ctoken = response[0].match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
+        html, client, headers = response
+
+        session_token = html.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
+        itct = html.match(/itct=(?<itct>[^"]+)"/).not_nil!["itct"]
+        ctoken = html.match(/'COMMENTS_TOKEN': "(?<ctoken>[^"]+)"/)
 
-        client = response[1]
-        headers = response[2]
         break
       end
     end
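The bypass logic in both hunks above follows the same Crystal concurrency pattern: spawn one fiber per region, have each fiber report either a usable result or nil on a channel, then drain one message per fiber and keep the first success. A distilled sketch of the pattern with generic names (not code from the diff; the real code uses an unbuffered channel, here a buffered one lets losing fibers finish sending after the reader stops):

# First-success fan-out over a channel.
results = Channel(String | Nil).new(3)

regions = ["GB", "DE", "JP"] # illustrative region keys
regions.each do |region|
  spawn do
    # Real code: fetch the watch page through a per-region proxy and send
    # {proxy_html, proxy_client, proxy_headers} on success, nil on failure.
    results.send(region == "DE" ? "unblocked via #{region}" : nil)
  end
end

regions.size.times do
  if result = results.receive
    puts result
    break # first success wins; later messages are simply never read
  end
end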
src/invidious/helpers/utils.cr
@@ -30,7 +30,7 @@ def make_client(url, proxies = {} of String => Array({ip: String, port: Int32}),
   client.connect_timeout = 10.seconds
 
   if region
-    proxies[region]?.try &.each do |proxy|
+    proxies[region]?.try &.sample(40).each do |proxy|
       begin
         proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
         client.set_proxy(proxy)
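A small but meaningful tweak: instead of walking a region's proxy list in a fixed order, make_client now tries a random sample of at most 40 entries, which both spreads load across proxies and bounds the number of connection attempts. For reference:

# Array#sample(n) returns up to n random elements without replacement.
pool = (1..100).to_a
attempts = pool.sample(40) # at most 40 entries, in a new random order each call
puts attempts.size # => 40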
src/invidious/trending.cr (new file)
@@ -0,0 +1,41 @@
+def fetch_trending(trending_type, proxies, region)
+  client = make_client(YT_URL)
+  headers = HTTP::Headers.new
+  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
+
+  region ||= "US"
+  region = region.upcase
+
+  trending = ""
+  if trending_type
+    trending_type = trending_type.downcase.capitalize
+
+    response = client.get("/feed/trending?gl=#{region}&hl=en", headers).body
+
+    yt_data = response.match(/window\["ytInitialData"\] = (?<data>.*);/)
+    if yt_data
+      yt_data = JSON.parse(yt_data["data"].rchop(";"))
+    else
+      raise "Could not pull trending pages."
+    end
+
+    tabs = yt_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][0]["tabRenderer"]["content"]["sectionListRenderer"]["subMenu"]["channelListSubMenuRenderer"]["contents"].as_a
+    url = tabs.select { |tab| tab["channelListSubMenuAvatarRenderer"]["title"]["simpleText"] == trending_type }[0]?
+
+    if url
+      url = url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
+      url += "&disable_polymer=1&gl=#{region}&hl=en"
+      trending = client.get(url).body
+    else
+      trending = client.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
+    end
+  else
+    trending = client.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
+  end
+
+  trending = XML.parse_html(trending)
+  nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
+  trending = extract_videos(nodeset)
+
+  return trending
+end
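One detail worth noting in fetch_trending: the type parameter is normalized with downcase.capitalize before being compared against the tab titles in ytInitialData, so any casing the client sends maps onto YouTube's tab naming. For example:

["music", "GAMING", "Movies"].each do |t|
  puts t.downcase.capitalize # => Music, Gaming, Movies
end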
src/invidious/videos.cr
@@ -578,47 +578,26 @@ def fetch_video(id, proxies, region)
   info = info_channel.receive
 
   if info["reason"]? && info["reason"].includes? "your country"
-    bypass_channel = Channel(HTTPProxy | Nil).new
+    bypass_channel = Channel({HTTPClient, String} | Nil).new
 
-    proxies.each do |region, list|
+    proxies.each do |proxy_region, list|
       spawn do
-        info = HTTP::Params.new({
-          "reason" => [info["reason"]],
-        })
+        client = make_client(YT_URL, proxies, proxy_region)
 
-        list.each do |proxy|
-          begin
-            client = HTTPClient.new(YT_URL)
-            client.read_timeout = 10.seconds
-            client.connect_timeout = 10.seconds
-
-            proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
-            client.set_proxy(proxy)
-
-            info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
-            if !info["reason"]?
-              bypass_channel.send(proxy)
-              break
-            end
-          rescue ex
-          end
-        end
-
-        # If none of the proxies we tried returned a valid response
-        if info["reason"]?
+        info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
+        if !info["reason"]?
+          bypass_channel.send({client, proxy_region})
+        else
           bypass_channel.send(nil)
         end
       end
     end
 
     proxies.size.times do
-      proxy = bypass_channel.receive
-      if proxy
+      response = bypass_channel.receive
+      if response
         begin
-          client = HTTPClient.new(YT_URL)
-          client.read_timeout = 10.seconds
-          client.connect_timeout = 10.seconds
-          client.set_proxy(proxy)
+          client, proxy_region = response
 
           html = XML.parse_html(client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999").body)
           info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
@@ -627,11 +606,7 @@ def fetch_video(id, proxies, region)
            info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
          end
 
-          proxy = {ip: proxy.proxy_host, port: proxy.proxy_port}
-          region_proxies = proxies.select { |region, list| list.includes? proxy }
-          if !region_proxies.empty?
-            info["region"] = region_proxies.keys[0]
-          end
+          info["region"] = proxy_region
 
           break
         rescue ex
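The fetch_video refactor mirrors the comments change: the bypass channel now carries an already-configured {HTTPClient, String} pair, so the winning client and its region name are reused directly. That replaces the old dance of rebuilding a client from the successful HTTPProxy and reverse-searching the proxy table to recover which region it belonged to.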
src/invidious/views/trending.ecr (new file)
@@ -0,0 +1,11 @@
+<% content_for "header" do %>
+<title>Trending - Invidious</title>
+<% end %>
+
+<div class="pure-g">
+  <% trending.each_slice(4) do |slice| %>
+    <% slice.each do |item| %>
+      <%= rendered "components/item" %>
+    <% end %>
+  <% end %>
+</div>
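The template lays trending videos out in rows of four for the pure-g grid: each_slice(4) groups the flat list into rows, and the shared components/item partial renders each entry. For reference:

# each_slice(n) yields the collection in chunks of at most n elements.
(1..6).each_slice(4) { |row| p row } # => [1, 2, 3, 4] then [5, 6]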