Merge branch 'master' into api-only

Omar Roth, 2018-11-11 09:41:45 -06:00
commit 850118601b

8 changed files with 109 additions and 34 deletions

diff --git a/README.md b/README.md

@@ -146,6 +146,7 @@ $ ./sentry
 - [Alternate Tube Redirector](https://addons.mozilla.org/en-US/firefox/addon/alternate-tube-redirector/): Automatically open Youtube Videos on alternate sites like Invidious or Hooktube.
 - [Invidious Redirect](https://greasyfork.org/en/scripts/370461-invidious-redirect): Redirects Youtube URLs to Invidio.us (userscript)
 - [Invidio.us embed](https://greasyfork.org/en/scripts/370442-invidious-embed): Replaces YouTube embeds with Invidio.us embeds (userscript)
+- [Invidious Downloader](https://github.com/erupete/InvidiousDownloader): Tampermonkey userscript for downloading videos or audio on Invidious (userscript)

 ## Contributing

diff --git a/shard.yml b/shard.yml

@@ -13,7 +13,7 @@ dependencies:
     github: detectlanguage/detectlanguage-crystal
   kemal:
     github: kemalcr/kemal
-    commit: b389022
+    commit: afd17fc
   pg:
     github: will/crystal-pg

diff --git a/src/invidious.cr b/src/invidious.cr

@@ -216,26 +216,7 @@ get "/api/v1/comments/:id" do |env|
       halt env, status_code: 500, response: error_message
     end

-    if format == "json"
-      next comments
-    else
-      comments = JSON.parse(comments)
-      content_html = template_youtube_comments(comments)
-
-      response = JSON.build do |json|
-        json.object do
-          json.field "contentHtml", content_html
-
-          if comments["commentCount"]?
-            json.field "commentCount", comments["commentCount"]
-          else
-            json.field "commentCount", 0
-          end
-        end
-      end
-
-      next response
-    end
+    next comments
  elsif source == "reddit"
    begin
      comments, reddit_thread = fetch_reddit_comments(id)
@@ -598,7 +579,12 @@ get "/api/v1/channels/:ucid" do |env|
   end

   page = 1
-  videos, count = get_60_videos(ucid, page, auto_generated)
+  begin
+    videos, count = get_60_videos(ucid, page, auto_generated)
+  rescue ex
+    error_message = {"error" => ex.message}.to_json
+    halt env, status_code: 500, response: error_message
+  end

   client = make_client(YT_URL)
   channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
@@ -711,6 +697,7 @@ get "/api/v1/channels/:ucid" do |env|
         json.field "published", video.published.to_unix
         json.field "publishedText", "#{recode_date(video.published)} ago"
         json.field "lengthSeconds", video.length_seconds
+        json.field "liveNow", video.live_now
         json.field "paid", video.paid
         json.field "premium", video.premium
       end
@@ -738,7 +725,12 @@ end
     halt env, status_code: 500, response: error_message
   end

-  videos, count = get_60_videos(ucid, page, auto_generated)
+  begin
+    videos, count = get_60_videos(ucid, page, auto_generated)
+  rescue ex
+    error_message = {"error" => ex.message}.to_json
+    halt env, status_code: 500, response: error_message
+  end

   result = JSON.build do |json|
     json.array do
@@ -768,6 +760,7 @@ end
         json.field "published", video.published.to_unix
         json.field "publishedText", "#{recode_date(video.published)} ago"
         json.field "lengthSeconds", video.length_seconds
+        json.field "liveNow", video.live_now
        json.field "paid", video.paid
        json.field "premium", video.premium
      end
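Taken together, these hunks change what a consumer of the channel endpoints sees: errors from get_60_videos now come back as a JSON error object with status 500 instead of an unhandled exception, and each video entry gains a liveNow flag. A minimal client sketch (the instance URL and channel ID are placeholders, not part of this commit):

    require "http/client"
    require "json"

    # Hypothetical instance and channel, for illustration only.
    response = HTTP::Client.get("https://invidio.us/api/v1/channels/UCxxxxxxxxxxxxxxxxxxxxxx/videos")

    if response.status_code == 500
      # Failures from get_60_videos now surface as {"error": "..."} with status 500.
      puts JSON.parse(response.body)["error"]
    else
      JSON.parse(response.body).as_a.each do |video|
        # liveNow is the field added in this commit.
        puts "#{video["title"]} live=#{video["liveNow"]}"
      end
    end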

diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr

@@ -58,7 +58,7 @@ end
 def fetch_youtube_comments(id, continuation, proxies, format)
   client = make_client(YT_URL)
-  html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
+  html = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
   headers = HTTP::Headers.new
   headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
   body = html.body
@@ -83,7 +83,7 @@ def fetch_youtube_comments(id, continuation, proxies, format)
       proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
       proxy_client.set_proxy(proxy)
-      response = proxy_client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
+      response = proxy_client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
       proxy_headers = HTTP::Headers.new
       proxy_headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
       proxy_html = response.body
@@ -140,8 +140,8 @@ def fetch_youtube_comments(id, continuation, proxies, format)
   headers["content-type"] = "application/x-www-form-urlencoded"
   headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-  headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
-  headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1"
+  headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999"
+  headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999"
   headers["x-youtube-client-name"] = "1"
   headers["x-youtube-client-version"] = "2.20180719"
@@ -264,6 +264,23 @@ def fetch_youtube_comments(id, continuation, proxies, format)
     end
   end

+  if format == "html"
+    comments = JSON.parse(comments)
+    content_html = template_youtube_comments(comments)
+
+    comments = JSON.build do |json|
+      json.object do
+        json.field "contentHtml", content_html
+
+        if comments["commentCount"]?
+          json.field "commentCount", comments["commentCount"]
+        else
+          json.field "commentCount", 0
+        end
+      end
+    end
+  end
+
   return comments
 end
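With this hunk, the HTML templating that used to live in the /api/v1/comments/:id route (removed above) now happens inside fetch_youtube_comments itself, so both output shapes come from one place. A client sketch for the html format (instance URL and video ID are placeholders):

    require "http/client"
    require "json"

    # Illustrative only; any eleven-character video ID works here.
    body = HTTP::Client.get("https://invidio.us/api/v1/comments/xxxxxxxxxxx?format=html").body
    data = JSON.parse(body)

    puts data["commentCount"] # 0 when the upstream count is missing
    puts data["contentHtml"]  # comments pre-rendered by template_youtube_comments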

diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr

@@ -296,3 +296,55 @@ def extract_items(nodeset, ucid = nil)
   return items
 end
+
+def create_response(user_id, operation, key, expire = 6.hours)
+  expire = Time.now + expire
+  nonce = Random::Secure.hex(4)
+
+  challenge = "#{expire.to_unix}-#{nonce}-#{user_id}-#{operation}"
+  token = OpenSSL::HMAC.digest(:sha256, key, challenge)
+
+  challenge = Base64.urlsafe_encode(challenge)
+  token = Base64.urlsafe_encode(token)
+
+  return challenge, token
+end
+
+def validate_response(challenge, token, user_id, operation, key)
+  if !challenge
+    raise "Hidden field \"challenge\" is a required field"
+  end
+
+  if !token
+    raise "Hidden field \"token\" is a required field"
+  end
+
+  challenge = Base64.decode_string(challenge)
+  if challenge.split("-").size == 4
+    expire, nonce, challenge_user_id, challenge_operation = challenge.split("-")
+    expire = expire.to_i?
+    expire ||= 0
+  else
+    raise "Invalid challenge"
+  end
+
+  challenge = OpenSSL::HMAC.digest(:sha256, HMAC_KEY, challenge)
+  challenge = Base64.urlsafe_encode(challenge)
+  if challenge != token
+    raise "Invalid token"
+  end
+
+  if challenge_operation != operation
+    raise "Invalid token"
+  end
+
+  if challenge_user_id != user_id
+    raise "Invalid token"
+  end
+
+  if expire < Time.now.to_unix
+    raise "Token is expired, please try again"
+  end
+end
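The two helpers implement a stateless anti-CSRF scheme: the server signs an expiry, a nonce, the user and the operation into an HMAC token, and verification just recomputes the HMAC over the decoded challenge. A minimal round-trip sketch (`hmac_key`, the user ID and the operation name are illustrative; `Time.now` matches the Crystal of this era, later renamed `Time.local`):

    require "base64"
    require "openssl/hmac"
    require "random/secure"

    hmac_key = "secret"          # stands in for the server's HMAC_KEY
    expire   = Time.now + 6.hours
    nonce    = Random::Secure.hex(4)

    # create_response: sign expiry, nonce, user and operation into a token.
    challenge = "#{expire.to_unix}-#{nonce}-user@example.com-subscribe"
    token     = Base64.urlsafe_encode(OpenSSL::HMAC.digest(:sha256, hmac_key, challenge))

    # validate_response: decode the submitted challenge, recompute, compare.
    decoded    = Base64.decode_string(Base64.urlsafe_encode(challenge))
    recomputed = Base64.urlsafe_encode(OpenSSL::HMAC.digest(:sha256, hmac_key, decoded))
    puts recomputed == token # prints true

Because the expiry, user and operation travel in the clear inside the challenge, validate_response can reject stale or cross-user tokens with a specific error before the HMAC comparison ever runs.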

diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr

@@ -26,7 +26,7 @@ def fetch_mix(rdid, video_id, cookies = nil)
   if cookies
     headers = cookies.add_request_headers(headers)
   end
-  response = client.get("/watch?v=#{video_id}&list=#{rdid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en", headers)
+  response = client.get("/watch?v=#{video_id}&list=#{rdid}&gl=US&hl=en&has_verified=1&bpctr=9999999999", headers)

   yt_data = response.body.match(/window\["ytInitialData"\] = (?<data>.*);/)
   if yt_data
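The extraction pattern in the context lines above, shown in isolation (a sketch; the body is a stand-in, since real watch pages embed a much larger blob):

    require "json"

    # Stand-in for a watch-page body.
    body = %(<script>window["ytInitialData"] = {"title":"Mix"};</script>)

    if yt_data = body.match(/window\["ytInitialData"\] = (?<data>.*);/)
      data = JSON.parse(yt_data["data"])
      puts data["title"] # prints Mix
    end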

diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr

@@ -30,7 +30,7 @@ def fetch_playlist_videos(plid, page, video_count, continuation = nil)
   client = make_client(YT_URL)

   if continuation
-    html = client.get("/watch?v=#{continuation}&list=#{plid}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
+    html = client.get("/watch?v=#{continuation}&list=#{plid}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
     html = XML.parse_html(html.body)

     index = html.xpath_node(%q(//span[@id="playlist-current-index"])).try &.content.to_i?
@@ -187,7 +187,7 @@ def fetch_playlist(plid)
   author = anchor.xpath_node(%q(.//li[1]/a)).not_nil!.content
   author_thumbnail = document.xpath_node(%q(//img[@class="channel-header-profile-image"])).try &.["src"]
   author_thumbnail ||= ""
-  ucid = anchor.xpath_node(%q(.//li[1]/a)).not_nil!["href"].split("/")[2]
+  ucid = anchor.xpath_node(%q(.//li[1]/a)).not_nil!["href"].split("/")[-1]

   video_count = anchor.xpath_node(%q(.//li[2])).not_nil!.content.delete("videos, ").to_i
   views = anchor.xpath_node(%q(.//li[3])).not_nil!.content.delete("No views, ")
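One plausible motivation for the `[2]` → `[-1]` change: the last path segment is the UCID whether the href is relative or absolute, while a fixed index only works for one shape (illustrative hrefs):

    # Relative href: both indices happen to agree.
    puts "/channel/UC1234567890abcdefghijkl".split("/")[2]   # UC1234567890abcdefghijkl
    # Absolute href: only the last segment is still the UCID.
    puts "https://www.youtube.com/channel/UC1234567890abcdefghijkl".split("/")[2]  # www.youtube.com
    puts "https://www.youtube.com/channel/UC1234567890abcdefghijkl".split("/")[-1] # UC1234567890abcdefghijkl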

diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr

@@ -546,7 +546,7 @@ def fetch_video(id, proxies)
   spawn do
     client = make_client(YT_URL)
-    html = client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1")
+    html = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")

     if md = html.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
       next html_channel.send(md["id"])
@ -620,7 +620,7 @@ def fetch_video(id, proxies)
client.connect_timeout = 10.seconds client.connect_timeout = 10.seconds
client.set_proxy(proxy) client.set_proxy(proxy)
html = XML.parse_html(client.get("/watch?v=#{id}&bpctr=#{Time.new.to_unix + 2000}&gl=US&hl=en&disable_polymer=1").body) html = XML.parse_html(client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999").body)
info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body) info = HTTP::Params.parse(client.get("/get_video_info?video_id=#{id}&el=detailpage&ps=default&eurl=&gl=US&hl=en&disable_polymer=1").body)
if info["reason"]? if info["reason"]?
@@ -641,7 +641,19 @@ def fetch_video(id, proxies)
   end

   if info["reason"]?
-    raise info["reason"]
+    html_info = html.to_s.match(/ytplayer\.config = (?<info>.*?);ytplayer\.load/).try &.["info"]
+
+    if html_info
+      html_info = JSON.parse(html_info)["args"].as_h
+      info.delete("reason")
+      html_info.each do |k, v|
+        info[k] = v.to_s
+      end
+    end
+
+    if info["reason"]?
+      raise info["reason"]
+    end
   end

   title = info["title"]
@@ -699,7 +711,7 @@ def fetch_video(id, proxies)
     sub_count_text = "0"
   end

-  author_thumbnail = html.xpath_node(%(//img[@alt="#{author}"]))
+  author_thumbnail = html.xpath_node(%(//span[@class="yt-thumb-clip"]/img))
   if author_thumbnail
     author_thumbnail = author_thumbnail["data-thumb"]
   else
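The new branch in fetch_video falls back to the ytplayer.config blob embedded in the watch page when get_video_info returns a "reason", and only re-raises if the page yields nothing either. The extraction step in isolation (a sketch; the HTML fragment is a stand-in for a real watch page):

    require "json"

    # Stand-in watch-page fragment; real pages embed a much larger config.
    html = %q(ytplayer.config = {"args": {"title": "Example"}};ytplayer.load())

    html_info = html.match(/ytplayer\.config = (?<info>.*?);ytplayer\.load/).try &.["info"]
    if html_info
      args = JSON.parse(html_info)["args"].as_h
      puts args["title"] # prints Example
    end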