Mirror of https://github.com/iv-org/invidious.git (synced 2024-12-23 14:29:27 -05:00)

Commit b555df8377: Merge pull request #2428 from syeopite/ameba-fixes

Fix (some) issues detected by Ameba
@@ -655,7 +655,7 @@ get "/subscription_manager" do |env|
end

subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
- subscriptions.sort_by! { |channel| channel.author.downcase }
+ subscriptions.sort_by!(&.author.downcase)

if action_takeout
if format == "json"
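The change above applies Crystal's short block syntax, which Ameba suggests when the block only forwards a call chain. A minimal sketch of the equivalence, using made-up author strings rather than anything from this codebase:

# &.method expands to { |x| x.method }; chained calls such as &.author.downcase
# behave the same way, so both forms sort identically.
authors = ["Zebra", "alpha", "Mike"]

long_form  = authors.sort_by { |author| author.downcase }
short_form = authors.sort_by(&.downcase)

puts long_form == short_form # => true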
@@ -703,13 +703,13 @@ get "/subscription_manager" do |env|
xml.element("outline", text: title, title: title) do
subscriptions.each do |channel|
if format == "newpipe"
- xmlUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
+ xml_url = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
else
- xmlUrl = "#{HOST_URL}/feed/channel/#{channel.id}"
+ xml_url = "#{HOST_URL}/feed/channel/#{channel.id}"
end

xml.element("outline", text: channel.author, title: channel.author,
- "type": "rss", xmlUrl: xmlUrl)
+ "type": "rss", xmlUrl: xml_url)
end
end
end
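The xmlUrl renames above follow Ameba's convention that local variables use snake_case; only the identifier changes, the value and the generated OPML attribute do not. A hypothetical, self-contained sketch:

# Hypothetical channel id: Ameba flags camelCase locals, so xmlUrl becomes xml_url,
# and the feed URL that ends up in the outline element is identical either way.
channel_id = "UC_example_id"
xml_url = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel_id}"

puts xml_url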
@@ -759,7 +759,7 @@ post "/data_control" do |env|
body = JSON.parse(body)

if body["subscriptions"]?
- user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
+ user.subscriptions += body["subscriptions"].as_a.map(&.as_s)
user.subscriptions.uniq!

user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

@@ -768,7 +768,7 @@ post "/data_control" do |env|
end

if body["watch_history"]?
- user.watched += body["watch_history"].as_a.map { |a| a.as_s }
+ user.watched += body["watch_history"].as_a.map(&.as_s)
user.watched.uniq!
PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
end

@@ -876,12 +876,12 @@ post "/data_control" do |env|
File.write(tempfile.path, entry.io.gets_to_end)
db = DB.open("sqlite3://" + tempfile.path)

- user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
+ user.watched += db.query_all("SELECT url FROM streams", as: String).map(&.lchop("https://www.youtube.com/watch?v="))
user.watched.uniq!

PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)

- user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
+ user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map(&.lchop("https://www.youtube.com/channel/"))
user.subscriptions.uniq!

user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
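The import hunks above swap `{ |a| a.as_s }` for the `&.as_s` shorthand when unwrapping JSON::Any arrays. A small illustrative round trip that assumes nothing about the real import payload:

require "json"

# JSON.parse returns JSON::Any; as_a yields Array(JSON::Any), and &.as_s unwraps
# each element to a String, exactly like { |a| a.as_s }.
subscriptions = JSON.parse(%(["UCaaa", "UCbbb"]))

puts subscriptions.as_a.map(&.as_s) # => ["UCaaa", "UCbbb"]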
@@ -1351,7 +1351,7 @@ error 500 do |env, ex|
error_template(500, ex)
end

- static_headers do |response, filepath, filestat|
+ static_headers do |response|
response.headers.add("Cache-Control", "max-age=2629800")
end

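The `static_headers` block above drops two parameters it never read. In Crystal a block may accept fewer values than are yielded, and Ameba asks that any parameter that is kept but unused be prefixed with an underscore. A sketch with a stand-in method (not Kemal's actual macro):

# Stand-in for a method that yields three values, like the response/filepath/filestat
# trio above; the block is free to capture only what it needs.
def with_static_file
  yield "response", "/path/to/file", 1234
end

with_static_file { |response| puts response }                       # extra yielded values ignored
with_static_file { |response, _filepath, _filestat| puts response } # or kept, underscore-prefixed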
@@ -62,7 +62,7 @@ def get_about_info(ucid, locale)
description_html = HTML.escape(description).gsub("\n", "<br>")

is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
- allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map { |a| a.as_s }
+ allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map(&.as_s)

related_channels = [] of AboutRelatedChannel
else

@@ -84,7 +84,7 @@ def get_about_info(ucid, locale)
description_html = HTML.escape(description).gsub("\n", "<br>")

is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
- allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map { |a| a.as_s }
+ allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map(&.as_s)

related_channels = initdata["contents"]["twoColumnBrowseResultsRenderer"]
.["secondaryContents"]?.try &.["browseSecondaryContentsRenderer"]["contents"][0]?

@@ -149,7 +149,7 @@ def get_about_info(ucid, locale)
end
end
end
- tabs = tabs_json.reject { |node| node["tabRenderer"]?.nil? }.map { |node| node["tabRenderer"]["title"].as_s.downcase }
+ tabs = tabs_json.reject { |node| node["tabRenderer"]?.nil? }.map(&.["tabRenderer"]["title"].as_s.downcase)
end

sub_count = initdata["header"]["c4TabbedHeaderRenderer"]?.try &.["subscriberCountText"]?.try &.["simpleText"]?.try &.as_s?

@@ -101,7 +101,7 @@ struct ChannelVideo
def to_tuple
{% begin %}
{
- {{*@type.instance_vars.map { |var| var.name }}}
+ {{*@type.instance_vars.map(&.name)}}
}
{% end %}
end
@@ -242,7 +242,7 @@ def produce_channel_community_continuation(ucid, cursor)
},
}

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }

@@ -255,11 +255,11 @@ def extract_channel_community_cursor(continuation)
.try { |i| Base64.decode(i) }
.try { |i| IO::Memory.new(i) }
.try { |i| Protodec::Any.parse(i) }
- .try { |i| i["80226972:0:embedded"]["3:1:base64"].as_h }
+ .try(&.["80226972:0:embedded"]["3:1:base64"].as_h)

if object["53:2:embedded"]?.try &.["3:0:embedded"]?
object["53:2:embedded"]["3:0:embedded"]["2:0:string"] = object["53:2:embedded"]["3:0:embedded"]
- .try { |i| i["2:0:base64"].as_h }
+ .try(&.["2:0:base64"].as_h)
.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i, padding: false) }
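The `cast_json(object)` to `cast_json(i)` changes above make each block use the parameter it declares instead of reaching for the outer variable, which is what Ameba's unused-block-argument lint flags. Since `object.try` yields `object` itself, the behaviour is unchanged. A minimal sketch with a throwaway hash, not the real continuation object:

require "json"

# try yields the receiver when it is not nil, so `i` and `object` refer to the
# same hash here; using `i` keeps the block self-contained and silences the lint.
object = {"2:string" => "community", "3:string" => "UC_example"}

before = object.try { |i| object.to_json }
after  = object.try { |i| i.to_json }

puts before == after # => true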
@@ -1,17 +1,17 @@
def fetch_channel_playlists(ucid, author, continuation, sort_by)
if continuation
response_json = YoutubeAPI.browse(continuation)
- continuationItems = response_json["onResponseReceivedActions"]?
+ continuation_items = response_json["onResponseReceivedActions"]?
.try &.[0]["appendContinuationItemsAction"]["continuationItems"]

- return [] of SearchItem, nil if !continuationItems
+ return [] of SearchItem, nil if !continuation_items

items = [] of SearchItem
- continuationItems.as_a.select(&.as_h.has_key?("gridPlaylistRenderer")).each { |item|
+ continuation_items.as_a.select(&.as_h.has_key?("gridPlaylistRenderer")).each { |item|
extract_item(item, author, ucid).try { |t| items << t }
}

- continuation = continuationItems.as_a.last["continuationItemRenderer"]?
+ continuation = continuation_items.as_a.last["continuationItemRenderer"]?
.try &.["continuationEndpoint"]["continuationCommand"]["token"].as_s
else
url = "/channel/#{ucid}/playlists?flow=list&view=1"

@@ -84,7 +84,7 @@ def produce_channel_playlists_url(ucid, cursor, sort = "newest", auto_generated
object["80226972:embedded"]["3:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json(object["80226972:embedded"]["3:base64"])))
object["80226972:embedded"].delete("3:base64")

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }

@@ -49,7 +49,7 @@ def produce_channel_videos_continuation(ucid, page = 1, auto_generated = nil, so
object["80226972:embedded"]["3:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json(object["80226972:embedded"]["3:base64"])))
object["80226972:embedded"].delete("3:base64")

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }
@@ -72,10 +72,9 @@ def fetch_youtube_comments(id, cursor, format, locale, thin_mode, region, sort_b
response = YoutubeAPI.next(continuation: ctoken, client_config: client_config)
contents = nil

- if response["onResponseReceivedEndpoints"]?
- onResponseReceivedEndpoints = response["onResponseReceivedEndpoints"]
+ if on_response_received_endpoints = response["onResponseReceivedEndpoints"]?
header = nil
- onResponseReceivedEndpoints.as_a.each do |item|
+ on_response_received_endpoints.as_a.each do |item|
if item["reloadContinuationItemsCommand"]?
case item["reloadContinuationItemsCommand"]["slot"]
when "RELOAD_CONTINUATION_SLOT_HEADER"

@@ -97,7 +96,8 @@ def fetch_youtube_comments(id, cursor, format, locale, thin_mode, region, sort_b
contents = body["contents"]?
header = body["header"]?
if body["continuations"]?
- moreRepliesContinuation = body["continuations"][0]["nextContinuationData"]["continuation"].as_s
+ # Removable? Doesn't seem like this is used.
+ more_replies_continuation = body["continuations"][0]["nextContinuationData"]["continuation"].as_s
end
else
raise InfoException.new("Could not fetch comments")
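The comment-fetching hunk above folds the presence check and the lookup into one step: assigning inside `if` binds a local only when the expression is not nil, so the separate `onResponseReceivedEndpoints = response[...]` line becomes unnecessary. A sketch with a throwaway hash instead of the real API response:

# If the key is missing, []? returns nil and the branch is skipped; otherwise
# `endpoints` is bound to the non-nil value for the body of the if.
response = {"onResponseReceivedEndpoints" => ["reload", "append"]}

if endpoints = response["onResponseReceivedEndpoints"]?
  puts endpoints.size # => 2
end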
@@ -111,10 +111,10 @@ def fetch_youtube_comments(id, cursor, format, locale, thin_mode, region, sort_b
end
end

- continuationItemRenderer = nil
+ continuation_item_renderer = nil
contents.as_a.reject! do |item|
if item["continuationItemRenderer"]?
- continuationItemRenderer = item["continuationItemRenderer"]
+ continuation_item_renderer = item["continuationItemRenderer"]
true
end
end

@@ -232,14 +232,14 @@ def fetch_youtube_comments(id, cursor, format, locale, thin_mode, region, sort_b
end
end

- if continuationItemRenderer
- if continuationItemRenderer["continuationEndpoint"]?
- continuationEndpoint = continuationItemRenderer["continuationEndpoint"]
- elsif continuationItemRenderer["button"]?
- continuationEndpoint = continuationItemRenderer["button"]["buttonRenderer"]["command"]
+ if continuation_item_renderer
+ if continuation_item_renderer["continuationEndpoint"]?
+ continuation_endpoint = continuation_item_renderer["continuationEndpoint"]
+ elsif continuation_item_renderer["button"]?
+ continuation_endpoint = continuation_item_renderer["button"]["buttonRenderer"]["command"]
end
- if continuationEndpoint
- json.field "continuation", continuationEndpoint["continuationCommand"]["token"].as_s
+ if continuation_endpoint
+ json.field "continuation", continuation_endpoint["continuationCommand"]["token"].as_s
end
end
end

@@ -638,7 +638,7 @@ def produce_comment_continuation(video_id, cursor = "", sort_by = "top")
object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
end

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }

@@ -673,7 +673,7 @@ def produce_comment_reply_continuation(video_id, ucid, comment_id)
},
}

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }
@@ -132,8 +132,6 @@ def error_redirect_helper(env : HTTP::Server::Context, locale : Hash(String, JSO
</li>
</ul>
END_HTML
-
- return next_step_html
else
return ""
end
@@ -89,14 +89,14 @@ def check_table(db, table_name, struct_type = nil)
struct_array = struct_type.type_array
column_array = get_column_array(db, table_name)
column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
- .try &.["types"].split(",").map { |line| line.strip }.reject &.starts_with?("CONSTRAINT")
+ .try &.["types"].split(",").map(&.strip).reject &.starts_with?("CONSTRAINT")

return if !column_types

struct_array.each_with_index do |name, i|
if name != column_array[i]?
if !column_array[i]?
- new_column = column_types.select { |line| line.starts_with? name }[0]
+ new_column = column_types.select(&.starts_with?(name))[0]
LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
next

@@ -104,14 +104,14 @@ def check_table(db, table_name, struct_type = nil)

# Column doesn't exist
if !column_array.includes? name
- new_column = column_types.select { |line| line.starts_with? name }[0]
+ new_column = column_types.select(&.starts_with?(name))[0]
db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
end

# Column exists but in the wrong position, rotate
if struct_array.includes? column_array[i]
until name == column_array[i]
- new_column = column_types.select { |line| line.starts_with? column_array[i] }[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
+ new_column = column_types.select(&.starts_with?(column_array[i]))[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")

# There's a column we didn't expect
if !new_column
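As the `check_table` hunks above show, the `&.` shorthand also accepts arguments, so `select { |line| line.starts_with? name }` becomes `select(&.starts_with?(name))`. A small sketch with invented column definitions:

# Invented SQL column lines; the two select calls are equivalent.
column_types = ["id text", "email varchar(254)", "CONSTRAINT users_pkey PRIMARY KEY (email)"]
name = "email"

puts column_types.select { |line| line.starts_with? name }[0] # => email varchar(254)
puts column_types.select(&.starts_with?(name))[0]             # => email varchar(254)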
@@ -62,7 +62,7 @@ struct SearchVideo
if xml
to_xml(HOST_URL, auto_generated, query_params, xml)
else
- XML.build do |json|
+ XML.build do |xml|
to_xml(HOST_URL, auto_generated, query_params, xml)
end
end
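Renaming the block parameter from `json` to `xml` above is more than cosmetic: `XML.build` yields an XML::Builder, and with the parameter named `xml` the inner `to_xml(..., xml)` call now resolves to that builder rather than to the outer `xml`, which the `if xml`/`else` guard shows is nil on this branch. A standalone sketch of the builder being used through its block parameter (made-up element names):

require "xml"

# XML.build yields an XML::Builder; naming the parameter `xml` makes it the obvious
# handle to pass to helpers that emit elements.
feed = XML.build do |xml|
  xml.element("feed") do
    xml.element("entry", id: "video-1")
  end
end

puts feed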
@@ -30,7 +30,7 @@ struct DecryptFunction

case op_body
when "{a.reverse()"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.reverse }
+ operations[op_name] = ->(a : Array(String), _b : Int32) { a.reverse }
when "{a.splice(0,b)"
operations[op_name] = ->(a : Array(String), b : Int32) { a.delete_at(0..(b - 1)); a }
else

@@ -173,7 +173,7 @@ module Kemal
return
end

- if @cached_files.sum { |element| element[1][:data].bytesize } + (size = File.size(file_path)) < CACHE_LIMIT
+ if @cached_files.sum(&.[1][:data].bytesize) + (size = File.size(file_path)) < CACHE_LIMIT
data = Bytes.new(size)
File.open(file_path) do |file|
file.read(data)
@@ -46,7 +46,7 @@ def sign_token(key, hash)
next if key == "signature"

if value.is_a?(JSON::Any) && value.as_a?
- value = value.as_a.map { |i| i.as_s }
+ value = value.as_a.map(&.as_s)
end

case value

@@ -82,7 +82,7 @@ def validate_request(token, session, request, key, db, locale = nil)
raise InfoException.new("Erroneous token")
end

- scopes = token["scopes"].as_a.map { |v| v.as_s }
+ scopes = token["scopes"].as_a.map(&.as_s)
scope = "#{request.method}:#{request.path.lchop("/api/v1/auth/").lstrip("/")}"
if !scopes_include_scope(scopes, scope)
raise InfoException.new("Invalid scope")

@@ -105,11 +105,11 @@ end

def scope_includes_scope(scope, subset)
methods, endpoint = scope.split(":")
- methods = methods.split(";").map { |method| method.upcase }.reject { |method| method.empty? }.sort
+ methods = methods.split(";").map(&.upcase).reject(&.empty?).sort!
endpoint = endpoint.downcase

subset_methods, subset_endpoint = subset.split(":")
- subset_methods = subset_methods.split(";").map { |method| method.upcase }.sort
+ subset_methods = subset_methods.split(";").map(&.upcase).sort!
subset_endpoint = subset_endpoint.downcase

if methods.empty?

@@ -15,8 +15,8 @@ class Invidious::Jobs::PullPopularVideosJob < Invidious::Jobs::BaseJob
def begin
loop do
videos = db.query_all(QUERY, as: ChannelVideo)
- .sort_by(&.published)
- .reverse
+ .sort_by!(&.published)
+ .reverse!

POPULAR_VIDEOS.set(videos)

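The popular-videos job above switches from `sort_by`/`reverse`, which each return a fresh array, to the in-place `sort_by!`/`reverse!` variants favoured by Ameba's performance checks for chains like this. A sketch with plain integers standing in for ChannelVideo records:

# sort_by! and reverse! mutate the receiver and return it, so the chain rearranges
# `published` without building new result arrays along the way.
published = [3, 1, 2]

copies = published.sort_by(&.itself).reverse # allocates two fresh arrays
published.sort_by!(&.itself).reverse!        # same order, done in place

puts copies == published # => true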
@@ -72,7 +72,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
videos += next_page.videos
end

- videos.uniq! { |video| video.id }
+ videos.uniq!(&.id)
videos = videos.first(50)
return Mix.new({
title: mix_title,

@@ -51,7 +51,7 @@ struct PlaylistVideo
if xml
to_xml(auto_generated, xml)
else
- XML.build do |json|
+ XML.build do |xml|
to_xml(auto_generated, xml)
end
end

@@ -143,7 +143,7 @@ struct Playlist
json.field "videos" do
json.array do
videos = get_playlist_videos(PG_DB, self, offset: offset, locale: locale, video_id: video_id)
- videos.each_with_index do |video, index|
+ videos.each do |video|
video.to_json(locale, json)
end
end

@@ -336,7 +336,7 @@ def produce_playlist_continuation(id, index)
},
}

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }

@@ -47,7 +47,7 @@ module Invidious::Routes::API::Manifest
end

audio_streams = video.audio_streams
- video_streams = video.video_streams.sort_by { |stream| {stream["width"].as_i, stream["fps"].as_i} }.reverse
+ video_streams = video.video_streams.sort_by { |stream| {stream["width"].as_i, stream["fps"].as_i} }.reverse!

manifest = XML.build(indent: " ", encoding: "UTF-8") do |xml|
xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",

@@ -335,11 +335,11 @@ module Invidious::Routes::API::V1::Authenticated

case env.request.headers["Content-Type"]?
when "application/x-www-form-urlencoded"
- scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
+ scopes = env.params.body.select { |k, _| k.match(/^scopes\[\d+\]$/) }.map { |_, v| v }
callback_url = env.params.body["callbackUrl"]?
expire = env.params.body["expire"]?.try &.to_i?
when "application/json"
- scopes = env.params.json["scopes"].as(Array).map { |v| v.as_s }
+ scopes = env.params.json["scopes"].as(Array).map(&.as_s)
callback_url = env.params.json["callbackUrl"]?.try &.as(String)
expire = env.params.json["expire"]?.try &.as(Int64)
else
@@ -20,7 +20,7 @@ module Invidious::Routes::API::V1::Search
duration = env.params.query["duration"]?.try &.downcase
duration ||= ""

- features = env.params.query["features"]?.try &.split(",").map { |feature| feature.downcase }
+ features = env.params.query["features"]?.try &.split(",").map(&.downcase)
features ||= [] of String

content_type = env.params.query["type"]?.try &.downcase

@@ -58,7 +58,7 @@ module Invidious::Routes::API::V1::Videos
captions.each do |caption|
json.object do
json.field "label", caption.name
- json.field "languageCode", caption.languageCode
+ json.field "languageCode", caption.language_code
json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name)}"
end
end

@@ -73,7 +73,7 @@ module Invidious::Routes::API::V1::Videos
env.response.content_type = "text/vtt; charset=UTF-8"

if lang
- caption = captions.select { |caption| caption.languageCode == lang }
+ caption = captions.select { |caption| caption.language_code == lang }
else
caption = captions.select { |caption| caption.name == label }
end

@@ -84,7 +84,7 @@ module Invidious::Routes::API::V1::Videos
caption = caption[0]
end

- url = URI.parse("#{caption.baseUrl}&tlang=#{tlang}").request_target
+ url = URI.parse("#{caption.base_url}&tlang=#{tlang}").request_target

# Auto-generated captions often have cues that aren't aligned properly with the video,
# as well as some other markup that makes it cumbersome, so we try to fix that here

@@ -96,7 +96,7 @@ module Invidious::Routes::API::V1::Videos
str << <<-END_VTT
WEBVTT
Kind: captions
- Language: #{tlang || caption.languageCode}
+ Language: #{tlang || caption.language_code}


END_VTT

@@ -29,8 +29,8 @@ module Invidious::Routes::Channels
item.author
end
end
- items = items.select(&.is_a?(SearchPlaylist)).map(&.as(SearchPlaylist))
- items.each { |item| item.author = "" }
+ items = items.select(SearchPlaylist).map(&.as(SearchPlaylist))
+ items.each(&.author = "")
else
sort_options = {"newest", "oldest", "popular"}
sort_by ||= "newest"

@@ -57,8 +57,8 @@ module Invidious::Routes::Channels
end

items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
- items = items.select { |item| item.is_a?(SearchPlaylist) }.map { |item| item.as(SearchPlaylist) }
- items.each { |item| item.author = "" }
+ items = items.select(SearchPlaylist).map(&.as(SearchPlaylist))
+ items.each(&.author = "")

templated "playlists"
end
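The channel-playlists hunks above filter with `select(SearchPlaylist)` instead of an explicit `is_a?` block: `Enumerable#select` also takes a pattern and keeps the elements for which `pattern === element` holds, and for a class that is exactly an `is_a?` test. A sketch over a simple union-typed array, with String standing in for SearchPlaylist:

# For a class, `String === item` behaves like item.is_a?(String), so both calls
# keep only the strings from this String | Int32 array.
items = ["playlist-a", 42, "playlist-b", 7]

puts items.select { |item| item.is_a?(String) } # => ["playlist-a", "playlist-b"]
puts items.select(String)                       # => ["playlist-a", "playlist-b"]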
@@ -168,11 +168,11 @@ module Invidious::Routes::Embed

preferred_captions = captions.select { |caption|
params.preferred_captions.includes?(caption.name) ||
- params.preferred_captions.includes?(caption.languageCode.split("-")[0])
+ params.preferred_captions.includes?(caption.language_code.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params.preferred_captions.index(caption.name) ||
- params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
+ params.preferred_captions.index(caption.language_code.split("-")[0])).not_nil!
}
captions = captions - preferred_captions

@@ -395,7 +395,7 @@ module Invidious::Routes::Login
return templated "login"
end

- tokens = env.params.body.select { |k, v| k.match(/^token\[\d+\]$/) }.map { |k, v| v }
+ tokens = env.params.body.select { |k, _| k.match(/^token\[\d+\]$/) }.map { |_, v| v }

answer ||= ""
captcha_type ||= "image"

@@ -419,7 +419,7 @@ module Invidious::Routes::Login

found_valid_captcha = false
error_exception = Exception.new
- tokens.each_with_index do |token, i|
+ tokens.each do |token|
begin
validate_request(token, answer, env.request, HMAC_KEY, PG_DB, locale)
found_valid_captcha = true
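The captcha loop above drops `each_with_index` because the index was never read; plain `each` states the intent and clears the unused-argument warning. Minimal sketch with made-up tokens:

# The index `i` was unused before, so iterating the tokens directly is enough.
tokens = ["token-one", "token-two"]

tokens.each { |token| puts token }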
@@ -245,7 +245,7 @@ module Invidious::Routes::Playlists
if query
begin
search_query, count, items, operators = process_search_query(query, page, user, region: nil)
- videos = items.select { |item| item.is_a? SearchVideo }.map { |item| item.as(SearchVideo) }
+ videos = items.select(SearchVideo).map(&.as(SearchVideo))
rescue ex
videos = [] of SearchVideo
count = 0

@@ -20,7 +20,7 @@ module Invidious::Routes::VideoPlayback
host = "https://r#{fvip}---#{mns.pop}.googlevideo.com"
end

- url = "/videoplayback?#{query_params.to_s}"
+ url = "/videoplayback?#{query_params}"

headers = HTTP::Headers.new
REQUEST_HEADERS_WHITELIST.each do |header|
@@ -153,11 +153,11 @@ module Invidious::Routes::Watch

preferred_captions = captions.select { |caption|
params.preferred_captions.includes?(caption.name) ||
- params.preferred_captions.includes?(caption.languageCode.split("-")[0])
+ params.preferred_captions.includes?(caption.language_code.split("-")[0])
}
preferred_captions.sort_by! { |caption|
(params.preferred_captions.index(caption.name) ||
- params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
+ params.preferred_captions.index(caption.language_code.split("-")[0])).not_nil!
}
captions = captions - preferred_captions

@@ -14,13 +14,13 @@ def channel_search(query, page, channel)
continuation = produce_channel_search_continuation(ucid, query, page)
response_json = YoutubeAPI.browse(continuation)

- continuationItems = response_json["onResponseReceivedActions"]?
+ continuation_items = response_json["onResponseReceivedActions"]?
.try &.[0]["appendContinuationItemsAction"]["continuationItems"]

- return 0, [] of SearchItem if !continuationItems
+ return 0, [] of SearchItem if !continuation_items

items = [] of SearchItem
- continuationItems.as_a.select(&.as_h.has_key?("itemSectionRenderer")).each { |item|
+ continuation_items.as_a.select(&.as_h.has_key?("itemSectionRenderer")).each { |item|
extract_item(item["itemSectionRenderer"]["contents"].as_a[0])
.try { |t| items << t }
}

@@ -128,7 +128,7 @@ def produce_search_params(page = 1, sort : String = "relevance", date : String =
object.delete("2:embedded")
end

- params = object.try { |i| Protodec::Any.cast_json(object) }
+ params = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }

@@ -161,7 +161,7 @@ def produce_channel_search_continuation(ucid, query, page)
},
}

- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }

@@ -183,7 +183,7 @@ def process_search_query(query, page, user, region)
sort = "relevance"
subscriptions = nil

- operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
+ operators = query.split(" ").select(&.match(/\w+:[\w,]+/))
operators.each do |operator|
key, value = operator.downcase.split(":")

@@ -248,17 +248,17 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
notifications = db.query_all("SELECT * FROM channel_videos WHERE id IN (#{args}) ORDER BY published DESC", args: notifications, as: ChannelVideo)
videos = [] of ChannelVideo

- notifications.sort_by! { |video| video.published }.reverse!
+ notifications.sort_by!(&.published).reverse!

case user.preferences.sort
when "alphabetically"
- notifications.sort_by! { |video| video.title }
+ notifications.sort_by!(&.title)
when "alphabetically - reverse"
- notifications.sort_by! { |video| video.title }.reverse!
+ notifications.sort_by!(&.title).reverse!
when "channel name"
- notifications.sort_by! { |video| video.author }
+ notifications.sort_by!(&.author)
when "channel name - reverse"
- notifications.sort_by! { |video| video.author }.reverse!
+ notifications.sort_by!(&.author).reverse!
else nil # Ignore
end
else

@@ -279,7 +279,7 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} ORDER BY ucid, published DESC", as: ChannelVideo)
end

- videos.sort_by! { |video| video.published }.reverse!
+ videos.sort_by!(&.published).reverse!
else
if user.preferences.unseen_only
# Only show unwatched

@@ -299,15 +299,15 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)

case user.preferences.sort
when "published - reverse"
- videos.sort_by! { |video| video.published }
+ videos.sort_by!(&.published)
when "alphabetically"
- videos.sort_by! { |video| video.title }
+ videos.sort_by!(&.title)
when "alphabetically - reverse"
- videos.sort_by! { |video| video.title }.reverse!
+ videos.sort_by!(&.title).reverse!
when "channel name"
- videos.sort_by! { |video| video.author }
+ videos.sort_by!(&.author)
when "channel name - reverse"
- videos.sort_by! { |video| video.author }.reverse!
+ videos.sort_by!(&.author).reverse!
else nil # Ignore
end

@@ -426,7 +426,7 @@ struct Video
self.captions.each do |caption|
json.object do
json.field "label", caption.name
- json.field "languageCode", caption.languageCode
+ json.field "language_code", caption.language_code
json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name)}"
end
end

@@ -703,10 +703,10 @@ struct Video
return @captions.as(Array(Caption)) if @captions
captions = info["captions"]?.try &.["playerCaptionsTracklistRenderer"]?.try &.["captionTracks"]?.try &.as_a.map do |caption|
name = caption["name"]["simpleText"]? || caption["name"]["runs"][0]["text"]
- languageCode = caption["languageCode"].to_s
- baseUrl = caption["baseUrl"].to_s
+ language_code = caption["languageCode"].to_s
+ base_url = caption["baseUrl"].to_s

- caption = Caption.new(name.to_s, languageCode, baseUrl)
+ caption = Caption.new(name.to_s, language_code, base_url)
caption.name = caption.name.split(" - ")[0]
caption
end

@@ -785,16 +785,16 @@ end

struct Caption
property name
- property languageCode
- property baseUrl
+ property language_code
+ property base_url

getter name : String
- getter languageCode : String
- getter baseUrl : String
+ getter language_code : String
+ getter base_url : String

setter name

- def initialize(@name, @languageCode, @baseUrl)
+ def initialize(@name, @language_code, @base_url)
end
end

@@ -880,7 +880,7 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_

primary_results = player_response.try &.["contents"]?.try &.["twoColumnWatchNextResults"]?.try &.["results"]?
.try &.["results"]?.try &.["contents"]?
- sentiment_bar = primary_results.try &.as_a.select { |object| object["videoPrimaryInfoRenderer"]? }[0]?
+ sentiment_bar = primary_results.try &.as_a.select(&.["videoPrimaryInfoRenderer"]?)[0]?
.try &.["videoPrimaryInfoRenderer"]?
.try &.["sentimentBar"]?
.try &.["sentimentBarRenderer"]?

@@ -891,11 +891,11 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
params["likes"] = JSON::Any.new(likes)
params["dislikes"] = JSON::Any.new(dislikes)

- params["descriptionHtml"] = JSON::Any.new(primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
+ params["descriptionHtml"] = JSON::Any.new(primary_results.try &.as_a.select(&.["videoSecondaryInfoRenderer"]?)[0]?
.try &.["videoSecondaryInfoRenderer"]?.try &.["description"]?.try &.["runs"]?
.try &.as_a.try { |t| content_to_comment_html(t).gsub("\n", "<br/>") } || "<p></p>")

- metadata = primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
+ metadata = primary_results.try &.as_a.select(&.["videoSecondaryInfoRenderer"]?)[0]?
.try &.["videoSecondaryInfoRenderer"]?
.try &.["metadataRowContainer"]?
.try &.["metadataRowContainerRenderer"]?

@@ -928,7 +928,7 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
end
end

- author_info = primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
+ author_info = primary_results.try &.as_a.select(&.["videoSecondaryInfoRenderer"]?)[0]?
.try &.["videoSecondaryInfoRenderer"]?.try &.["owner"]?.try &.["videoOwnerRenderer"]?

params["authorThumbnail"] = JSON::Any.new(author_info.try &.["thumbnail"]?

@@ -1023,13 +1023,13 @@ end
def process_video_params(query, preferences)
annotations = query["iv_load_policy"]?.try &.to_i?
autoplay = query["autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- comments = query["comments"]?.try &.split(",").map { |a| a.downcase }
+ comments = query["comments"]?.try &.split(",").map(&.downcase)
continue = query["continue"]?.try { |q| (q == "true" || q == "1").to_unsafe }
continue_autoplay = query["continue_autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
listen = query["listen"]?.try { |q| (q == "true" || q == "1").to_unsafe }
local = query["local"]?.try { |q| (q == "true" || q == "1").to_unsafe }
player_style = query["player_style"]?
- preferred_captions = query["subtitles"]?.try &.split(",").map { |a| a.downcase }
+ preferred_captions = query["subtitles"]?.try &.split(",").map(&.downcase)
quality = query["quality"]?
quality_dash = query["quality_dash"]?
region = query["region"]?
@@ -184,7 +184,7 @@ we're going to need to do it here in order to allow for translations.
</option>
<% end %>
<% captions.each do |caption| %>
- <option value='{"id":"<%= video.id %>","label":"<%= caption.name %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= caption.languageCode %>.vtt"}'>
+ <option value='{"id":"<%= video.id %>","label":"<%= caption.name %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= caption.language_code %>.vtt"}'>
<%= translate(locale, "Subtitles - `x` (.vtt)", caption.name) %>
</option>
<% end %>
@@ -40,7 +40,7 @@ def extract_videos(initial_data : Hash(String, JSON::Any), author_fallback : Str
target << i
end
end
- return target.select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
+ return target.select(SearchVideo).map(&.as(SearchVideo))
end

def extract_selected_tab(tabs)

@@ -236,7 +236,7 @@ def get_spys_proxies(country_code = "US")
proxies << {ip: ip, port: port, score: score}
end

- proxies = proxies.sort_by { |proxy| proxy[:score] }.reverse
+ proxies = proxies.sort_by!(&.[:score]).reverse!
return proxies
end

@@ -256,7 +256,7 @@ def decrypt_port(p, x)
p = p.gsub(/\b\w+\b/, x)

p = p.split(";")
- p = p.map { |item| item.split("=") }
+ p = p.map(&.split("="))

mapping = {} of String => Int32
p.each do |item|
@@ -410,8 +410,8 @@ module YoutubeAPI

# Logging
LOGGER.debug("YoutubeAPI: Using endpoint: \"#{endpoint}\"")
- LOGGER.trace("YoutubeAPI: ClientConfig: #{client_config.to_s}")
- LOGGER.trace("YoutubeAPI: POST data: #{data.to_s}")
+ LOGGER.trace("YoutubeAPI: ClientConfig: #{client_config}")
+ LOGGER.trace("YoutubeAPI: POST data: #{data}")

# Send the POST request
if client_config.proxy_region

@@ -436,7 +436,7 @@ module YoutubeAPI
# Logging
LOGGER.error("YoutubeAPI: Got error #{code} when requesting #{endpoint}")
LOGGER.error("YoutubeAPI: #{message}")
- LOGGER.info("YoutubeAPI: POST data was: #{data.to_s}")
+ LOGGER.info("YoutubeAPI: POST data was: #{data}")

raise InfoException.new("Could not extract JSON. Youtube API returned \
error #{code} with message:<br>\"#{message}\"")
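The logging tweaks above drop `.to_s` inside interpolations: string interpolation already calls `to_s` on every embedded expression, so the explicit call is redundant and is one of the lints this PR clears. Tiny sketch with a made-up value:

# Interpolation calls to_s implicitly, so both strings are identical.
port = 3000

puts "listening on 0.0.0.0:#{port.to_s}" == "listening on 0.0.0.0:#{port}" # => true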