diff --git a/src/helpers.cr b/src/helpers.cr
index ca01400d..b3625b67 100644
--- a/src/helpers.cr
+++ b/src/helpers.cr
@@ -46,6 +46,36 @@ class Video
})
end
# Top-level envelope of reddit's /submit.json search response.
class RedditSubmit
  JSON.mapping(
    data: RedditSubmitData,
  )
end
+
# Listing payload: the submissions ("children") matching the searched URL.
class RedditSubmitData
  JSON.mapping(
    children: Array(RedditThread),
  )
end
+
# A single reddit submission wrapper; the useful fields live under `data`.
class RedditThread
  JSON.mapping(
    data: RedditThreadData,
  )
end
+
# Fields of a reddit submission that the watch page renders
# (subreddit/id are also used to fetch the comment tree).
class RedditThreadData
  JSON.mapping(
    subreddit: String,
    id: String,
    num_comments: Int32,
    score: Int32,
    author: String,
    permalink: String,
    title: String,
  )
end
+
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
def ci_lower_bound(pos, n)
if n == 0
@@ -226,3 +256,67 @@ def make_client(url, context)
client.connect_timeout = 10.seconds
return client
end
+
# Finds reddit submissions linking to the given YouTube video id and returns
# a tuple of {comments, thread}: the top-level comment listing (JSON::Any)
# of the highest-scored submission, plus that RedditThread itself.
#
# `client` must be an HTTP client pointed at api.reddit.com.
# Raises (Enumerable::EmptyError) when no submission links to the video, or a
# JSON parse error when reddit returns an unexpected payload — callers rescue.
def get_reddit_comments(id, client)
  youtube_url = URI.escape("https://youtube.com/watch?v=#{id}")

  search_results = client.get("/submit.json?url=#{youtube_url}").body
  search_results = RedditSubmit.from_json(search_results)

  # max_by replaces sort_by {...}[-1]: same result, O(n) instead of O(n log n),
  # and raises a descriptive EmptyError instead of IndexError on no matches.
  top_reddit_thread = search_results.data.children.max_by { |child| child.data.score }

  comments = client.get("/r/#{top_reddit_thread.data.subreddit}/comments/#{top_reddit_thread.data.id}?sort=top&depth=3").body
  comments = JSON.parse(comments)

  # Listing [0] is the submission itself; [1] holds the comment tree.
  return comments[1]["data"]["children"], top_reddit_thread
end
+
# Recursively renders a reddit comment tree (JSON::Any array of comment
# children) into an HTML string. Entries without body_html (e.g. "more"
# stubs) are skipped. YouTube origins are stripped from links afterwards so
# they resolve relative to this site.
def template_comments(root)
  html = ""
  root.each do |child|
    # "more"/stub nodes carry no body_html; only real comments are rendered.
    next unless child["data"]["body_html"]?

    author = child["data"]["author"]
    score = child["data"]["score"]
    body_html = HTML.unescape(child["data"]["body_html"].as_s)

    # reddit encodes "no replies" as the empty string rather than null.
    replies_html = ""
    if child["data"]["replies"] != ""
      replies_html = template_comments(child["data"]["replies"]["data"]["children"])
    end

    # TODO: Allow for expanding comments instead of just dismissing them

    content = <<-END_HTML

    [ - ]
    #{score}
    #{author}

    #{body_html}
    #{replies_html}
    END_HTML

    if child["data"]["depth"].as_i > 0
      html += <<-END_HTML

      END_HTML
    else
      html += <<-END_HTML

      END_HTML
    end
  end

  # Strip an optional scheme + www only when followed by youtube.com.
  # The previous pattern /(https:\/\/)|(http:\/\/)?(www\.)?(youtube\.com)/
  # alternated at the top level, so any bare "https://" (even on non-YouTube
  # links) was deleted; anchoring the group to youtube.com fixes that.
  html = html.gsub(/(https?:\/\/)?(www\.)?youtube\.com/, "")

  return html
end
diff --git a/src/invidious.cr b/src/invidious.cr
index 96e1eb65..7d870756 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -37,7 +37,7 @@ end
Kemal::CLI.new
PG_DB = DB.open "postgres://kemal:kemal@localhost:5432/invidious"
-URL = URI.parse("https://www.youtube.com")
+YT_URL = URI.parse("https://www.youtube.com")
CONTEXT = OpenSSL::SSL::Context::Client.new
CONTEXT.verify_mode = OpenSSL::SSL::VerifyMode::NONE
CONTEXT.add_options(
@@ -45,26 +45,29 @@ CONTEXT.add_options(
OpenSSL::SSL::Options::NO_SSL_V2 |
OpenSSL::SSL::Options::NO_SSL_V3
)
-pool = Deque.new((threads * 1.2 + 1).to_i) do
- make_client(URL, CONTEXT)
+youtube_pool = Deque.new((threads * 1.2 + 1).to_i) do
+ make_client(YT_URL, CONTEXT)
+end
+reddit_pool = Deque.new((threads * 1.2 + 1).to_i) do
+ make_client(URI.parse("https://api.reddit.com"), CONTEXT)
end
-# Refresh pool by crawling YT
+# Refresh youtube_pool by crawling YT
threads.times do
spawn do
io = STDOUT
ids = Deque(String).new
random = Random.new
- client = get_client(pool)
+ client = get_client(youtube_pool)
search(random.base64(3), client) do |id|
ids << id
end
- pool << client
+ youtube_pool << client
loop do
- client = get_client(pool)
+ yt_client = get_client(youtube_pool)
if ids.empty?
search(random.base64(3), client) do |id|
@@ -73,8 +76,8 @@ threads.times do
end
if rand(300) < 1
- pool << make_client(URL, CONTEXT)
- client = get_client(pool)
+ youtube_pool << make_client(YT_URL, CONTEXT)
+ yt_client = get_client(youtube_pool)
end
begin
@@ -82,7 +85,7 @@ threads.times do
video = get_video(id, client, PG_DB)
rescue ex
io << id << " : " << ex.message << "\n"
- pool << make_client(URL, CONTEXT)
+ youtube_pool << make_client(YT_URL, CONTEXT)
next
ensure
ids.delete(id)
@@ -105,7 +108,20 @@ threads.times do
end
end
- pool << client
+ youtube_pool << client
+ end
+ end
+end
+
+threads.times do
+ spawn do
+ loop do
+ client = get_client(reddit_pool)
+
+ client.get("/")
+ sleep 10.seconds
+
+ reddit_pool << client
end
end
end
@@ -115,7 +131,7 @@ top_videos = [] of Video
spawn do
loop do
top = rank_videos(PG_DB, 40)
- client = get_client(pool)
+ client = get_client(youtube_pool)
args = [] of String
if top.size > 0
@@ -137,7 +153,7 @@ spawn do
top_videos = videos
- pool << client
+ youtube_pool << client
end
end
@@ -163,9 +179,9 @@ get "/watch" do |env|
env.params.query.delete_all("listen")
end
- client = get_client(pool)
+ yt_client = get_client(youtube_pool)
begin
- video = get_video(id, client, PG_DB)
+ video = get_video(id, yt_client, PG_DB)
rescue ex
error_message = ex.message
next templated "error"
@@ -220,7 +236,17 @@ get "/watch" do |env|
calculated_rating = 0.0
end
- pool << client
+ reddit_client = get_client(reddit_pool)
+
+ begin
+ reddit_comments, reddit_thread = get_reddit_comments(id, reddit_client)
+ rescue ex
+ reddit_comments = JSON.parse("[]")
+ reddit_thread = nil
+ end
+
+ reddit_pool << reddit_client
+ youtube_pool << yt_client
templated "watch"
end
@@ -235,7 +261,7 @@ get "/search" do |env|
page = env.params.query["page"]? && env.params.query["page"].to_i? ? env.params.query["page"].to_i : 1
- client = get_client(pool)
+ client = get_client(youtube_pool)
html = client.get("https://www.youtube.com/results?q=#{URI.escape(query)}&page=#{page}&sp=EgIQAVAU").body
html = XML.parse_html(html)
@@ -286,7 +312,7 @@ get "/search" do |env|
end
end
- pool << client
+ youtube_pool << client
templated "search"
end
diff --git a/src/views/watch.ecr b/src/views/watch.ecr
index 28f57203..1e15c156 100644
--- a/src/views/watch.ecr
+++ b/src/views/watch.ecr
@@ -60,6 +60,10 @@ var player = videojs('player', options, function() {
}
});
});
+
// Collapse a comment node: remove the element from layout without
// deleting it, so it could be re-shown later.
function dismiss(target) {
  target.style.display = "none";
}
@@ -94,6 +98,16 @@ var player = videojs('player', options, function() {
<%= video.description %>
+ <% if reddit_thread && !reddit_comments.as_a.empty? %>
+
+
+ <% end %>