summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--assets/css/default.css21
-rw-r--r--assets/js/sse.js2
-rw-r--r--docker-compose.yml2
-rw-r--r--locales/ar.json3
-rw-r--r--locales/de.json3
-rw-r--r--locales/el.json1
-rw-r--r--locales/en-US.json1
-rw-r--r--locales/eo.json3
-rw-r--r--locales/es.json1
-rw-r--r--locales/eu.json1
-rw-r--r--locales/fr.json1
-rw-r--r--locales/it.json1
-rw-r--r--locales/nb_NO.json1
-rw-r--r--locales/nl.json1
-rw-r--r--locales/pl.json1
-rw-r--r--locales/ru.json3
-rw-r--r--locales/uk.json3
-rw-r--r--src/invidious.cr380
-rw-r--r--src/invidious/channels.cr345
-rw-r--r--src/invidious/comments.cr211
-rw-r--r--src/invidious/helpers/helpers.cr6
-rw-r--r--src/invidious/helpers/static_file_handler.cr194
-rw-r--r--src/invidious/helpers/utils.cr21
-rw-r--r--src/invidious/search.cr4
-rw-r--r--src/invidious/trending.cr2
-rw-r--r--src/invidious/videos.cr18
-rw-r--r--src/invidious/views/channel.ecr39
-rw-r--r--src/invidious/views/playlists.ecr33
-rw-r--r--src/invidious/views/watch.ecr15
29 files changed, 948 insertions, 369 deletions
diff --git a/assets/css/default.css b/assets/css/default.css
index 2f875019..46f1d58f 100644
--- a/assets/css/default.css
+++ b/assets/css/default.css
@@ -2,6 +2,17 @@
background-color: rgb(255, 0, 0, 0.5);
}
+.channel-profile > * {
+ font-size: 1.17em;
+ font-weight: bold;
+ vertical-align: middle;
+}
+
+.channel-profile > img {
+ width: 48px;
+ height: auto;
+}
+
.channel-owner {
background-color: #008bec;
color: #fff;
@@ -270,6 +281,16 @@ input[type="search"]::-webkit-search-cancel-button {
}
}
+.vjs-user-inactive {
+ cursor: none;
+}
+
+.video-js .vjs-text-track-display > div > div > div {
+ background-color: rgba(0, 0, 0, 0.75) !important;
+ border-radius: 9px !important;
+ padding: 5px !important;
+}
+
.vjs-play-control,
.vjs-volume-panel,
.vjs-current-time,
diff --git a/assets/js/sse.js b/assets/js/sse.js
index 3601b5af..4f7320b3 100644
--- a/assets/js/sse.js
+++ b/assets/js/sse.js
@@ -94,7 +94,7 @@ var SSE = function (url, options) {
}
this._onStreamProgress = function(e) {
- if (this.xhr.status !== 200) {
+ if (this.xhr.status !== 200 && this.readyState !== this.CLOSED) {
this._onStreamFailure(e);
return;
}
diff --git a/docker-compose.yml b/docker-compose.yml
index 2d08e3ff..be8347fd 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -13,7 +13,7 @@ services:
dockerfile: docker/Dockerfile
restart: unless-stopped
ports:
- - "3000:3000"
+ - "127.0.0.1:3000:3000"
depends_on:
- postgres
diff --git a/locales/ar.json b/locales/ar.json
index cc24da4a..0e89bd42 100644
--- a/locales/ar.json
+++ b/locales/ar.json
@@ -310,10 +310,11 @@
"%A %B %-d, %Y": "",
"(edited)": "(تم تعديلة)",
"YouTube comment permalink": "رابط التعليق على اليوتيوب",
+ "permalink": "",
"`x` marked it with a ❤": "`x` اعجب بهذا",
"Audio mode": "الوضع الصوتى",
"Video mode": "وضع الفيديو",
"Videos": "الفيديوهات",
"Playlists": "قوائم التشغيل",
"Current version: ": "الإصدار الحالى"
-}
+} \ No newline at end of file
diff --git a/locales/de.json b/locales/de.json
index 3adbaec2..8bd91473 100644
--- a/locales/de.json
+++ b/locales/de.json
@@ -310,10 +310,11 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(editiert)",
"YouTube comment permalink": "YouTube-Kommentar Permalink",
+ "permalink": "",
"`x` marked it with a ❤": "`x` markierte es mit einem ❤",
"Audio mode": "Audiomodus",
"Video mode": "Videomodus",
"Videos": "Videos",
"Playlists": "Wiedergabelisten",
"Current version: ": "Aktuelle Version: "
-}
+} \ No newline at end of file
diff --git a/locales/el.json b/locales/el.json
index 3ed49c67..0012f314 100644
--- a/locales/el.json
+++ b/locales/el.json
@@ -355,6 +355,7 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(τροποποιημένο)",
"YouTube comment permalink": "Σύνδεσμος YouTube σχολίου",
+ "permalink": "",
"`x` marked it with a ❤": "Ο χρηστης `x` έβαλε ❤",
"Audio mode": "Λειτουργία ήχου",
"Video mode": "Λειτουργία βίντεο",
diff --git a/locales/en-US.json b/locales/en-US.json
index 9b786260..05f01819 100644
--- a/locales/en-US.json
+++ b/locales/en-US.json
@@ -355,6 +355,7 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(edited)",
"YouTube comment permalink": "YouTube comment permalink",
+ "permalink": "",
"`x` marked it with a ❤": "`x` marked it with a ❤",
"Audio mode": "Audio mode",
"Video mode": "Video mode",
diff --git a/locales/eo.json b/locales/eo.json
index 3b15f3e3..59d7229c 100644
--- a/locales/eo.json
+++ b/locales/eo.json
@@ -310,10 +310,11 @@
"%A %B %-d, %Y": "%A %-d de %B %Y",
"(edited)": "(redaktita)",
"YouTube comment permalink": "Fiksligilo de la komento en YouTube",
+ "permalink": "",
"`x` marked it with a ❤": "`x` markis ĝin per ❤",
"Audio mode": "Aŭda reĝimo",
"Video mode": "Videa reĝimo",
"Videos": "Videoj",
"Playlists": "Ludlistoj",
"Current version: ": "Nuna versio: "
-}
+} \ No newline at end of file
diff --git a/locales/es.json b/locales/es.json
index 10fbf5ca..394a3c31 100644
--- a/locales/es.json
+++ b/locales/es.json
@@ -310,6 +310,7 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(editado)",
"YouTube comment permalink": "Enlace permanente de YouTube del comentario",
+ "permalink": "",
"`x` marked it with a ❤": "`x` lo ha marcado con un ❤",
"Audio mode": "Modo de audio",
"Video mode": "Modo de vídeo",
diff --git a/locales/eu.json b/locales/eu.json
index 60fa6f6d..b9d2ebc3 100644
--- a/locales/eu.json
+++ b/locales/eu.json
@@ -309,6 +309,7 @@
"%A %B %-d, %Y": "",
"(edited)": "",
"YouTube comment permalink": "",
+ "permalink": "",
"`x` marked it with a ❤": "",
"Audio mode": "",
"Video mode": "",
diff --git a/locales/fr.json b/locales/fr.json
index 72b12e92..7c4c408c 100644
--- a/locales/fr.json
+++ b/locales/fr.json
@@ -310,6 +310,7 @@
"%A %B %-d, %Y": "%A %-d %B %Y",
"(edited)": "(modifié)",
"YouTube comment permalink": "Lien YouTube permanent vers le commentaire",
+ "permalink": "",
"`x` marked it with a ❤": "`x` l'a marqué d'un ❤",
"Audio mode": "Mode Audio",
"Video mode": "Mode Vidéo",
diff --git a/locales/it.json b/locales/it.json
index ce7800c3..1c07413d 100644
--- a/locales/it.json
+++ b/locales/it.json
@@ -309,6 +309,7 @@
"%A %B %-d, %Y": "%A %-d %B %Y",
"(edited)": "(modificato)",
"YouTube comment permalink": "Link permanente al commento di YouTube",
+ "permalink": "",
"`x` marked it with a ❤": "`x` l'ha contrassegnato con un ❤",
"Audio mode": "Modalità audio",
"Video mode": "Modalità video",
diff --git a/locales/nb_NO.json b/locales/nb_NO.json
index e33004cd..316a38ab 100644
--- a/locales/nb_NO.json
+++ b/locales/nb_NO.json
@@ -310,6 +310,7 @@
"%A %B %-d, %Y": "",
"(edited)": "(redigert)",
"YouTube comment permalink": "Permanent YouTube-lenke til innholdet",
+ "permalink": "",
"`x` marked it with a ❤": "`x` levnet et ❤",
"Audio mode": "Lydmodus",
"Video mode": "Video-modus",
diff --git a/locales/nl.json b/locales/nl.json
index d5a4907e..19413a4f 100644
--- a/locales/nl.json
+++ b/locales/nl.json
@@ -310,6 +310,7 @@
"%A %B %-d, %Y": "%A %B %-d, %Y",
"(edited)": "(bewerkt)",
"YouTube comment permalink": "Link naar YouTube-reactie",
+ "permalink": "",
"`x` marked it with a ❤": "`x` heeft dit gemarkeerd met ❤",
"Audio mode": "Audiomodus",
"Video mode": "Videomodus",
diff --git a/locales/pl.json b/locales/pl.json
index 52875f03..4f95bdbe 100644
--- a/locales/pl.json
+++ b/locales/pl.json
@@ -310,6 +310,7 @@
"%A %B %-d, %Y": "",
"(edited)": "(edytowany)",
"YouTube comment permalink": "Odnośnik bezpośredni do komentarza na YouTube",
+ "permalink": "",
"`x` marked it with a ❤": "`x` oznaczonych ❤",
"Audio mode": "Tryb audio",
"Video mode": "Tryb wideo",
diff --git a/locales/ru.json b/locales/ru.json
index 0bee2a39..a4f77c19 100644
--- a/locales/ru.json
+++ b/locales/ru.json
@@ -310,10 +310,11 @@
"%A %B %-d, %Y": "%-d %B %Y, %A",
"(edited)": "(изменено)",
"YouTube comment permalink": "Прямая ссылка на YouTube",
+ "permalink": "",
"`x` marked it with a ❤": "❤ от автора канала \"`x`\"",
"Audio mode": "Аудио режим",
"Video mode": "Видео режим",
"Videos": "Видео",
"Playlists": "Плейлисты",
"Current version: ": "Текущая версия: "
-}
+} \ No newline at end of file
diff --git a/locales/uk.json b/locales/uk.json
index c4632e9c..a260b694 100644
--- a/locales/uk.json
+++ b/locales/uk.json
@@ -310,10 +310,11 @@
"%A %B %-d, %Y": "%-d %B %Y, %A",
"(edited)": "(змінено)",
"YouTube comment permalink": "Пряме посилання на коментар в YouTube",
+ "permalink": "",
"`x` marked it with a ❤": "❤ цьому від каналу `x`",
"Audio mode": "Аудіорежим",
"Video mode": "Відеорежим",
"Videos": "Відео",
"Playlists": "Плейлисти",
"Current version: ": "Поточна версія: "
-}
+} \ No newline at end of file
diff --git a/src/invidious.cr b/src/invidious.cr
index 3331d09f..4c190249 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -40,20 +40,25 @@ PG_URL = URI.new(
path: CONFIG.db.dbname,
)
-PG_DB = DB.open PG_URL
-ARCHIVE_URL = URI.parse("https://archive.org")
-LOGIN_URL = URI.parse("https://accounts.google.com")
-PUBSUB_URL = URI.parse("https://pubsubhubbub.appspot.com")
-REDDIT_URL = URI.parse("https://www.reddit.com")
-TEXTCAPTCHA_URL = URI.parse("http://textcaptcha.com")
-YT_URL = URI.parse("https://www.youtube.com")
+PG_DB = DB.open PG_URL
+ARCHIVE_URL = URI.parse("https://archive.org")
+LOGIN_URL = URI.parse("https://accounts.google.com")
+PUBSUB_URL = URI.parse("https://pubsubhubbub.appspot.com")
+REDDIT_URL = URI.parse("https://www.reddit.com")
+TEXTCAPTCHA_URL = URI.parse("http://textcaptcha.com")
+YT_URL = URI.parse("https://www.youtube.com")
+
CHARS_SAFE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
TEST_IDS = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
-CURRENT_BRANCH = {{ "#{`git branch | sed -n '/\* /s///p'`.strip}" }}
-CURRENT_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit`.strip}" }}
-CURRENT_VERSION = {{ "#{`git describe --tags --abbrev=0`.strip}" }}
MAX_ITEMS_PER_PAGE = 1500
+REQUEST_HEADERS_WHITELIST = {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "Content-Length", "If-None-Match", "Range"}
+RESPONSE_HEADERS_BLACKLIST = {"Access-Control-Allow-Origin", "Alt-Svc", "Server"}
+
+CURRENT_BRANCH = {{ "#{`git branch | sed -n '/\* /s///p'`.strip}" }}
+CURRENT_COMMIT = {{ "#{`git rev-list HEAD --max-count=1 --abbrev-commit`.strip}" }}
+CURRENT_VERSION = {{ "#{`git describe --tags --abbrev=0`.strip}" }}
+
# This is used to determine the `?v=` on the end of file URLs (for cache busting). We
# only need to expire modified assets, so we can use this to find the last commit that changes
# any assets
@@ -117,6 +122,7 @@ Kemal::CLI.new ARGV
# Check table integrity
if CONFIG.check_tables
+ analyze_table(PG_DB, logger, "channels", InvidiousChannel)
analyze_table(PG_DB, logger, "channel_videos", ChannelVideo)
analyze_table(PG_DB, logger, "nonces", Nonce)
analyze_table(PG_DB, logger, "session_ids", SessionId)
@@ -204,8 +210,6 @@ spawn do
end
end
-proxies = PROXY_LIST
-
before_all do |env|
host_url = make_host_url(config, Kemal.config)
env.response.headers["X-XSS-Protection"] = "1; mode=block"
@@ -377,7 +381,7 @@ get "/watch" do |env|
env.params.query.delete_all("listen")
begin
- video = get_video(id, PG_DB, proxies, region: params.region)
+ video = get_video(id, PG_DB, region: params.region)
rescue ex : VideoRedirect
next env.redirect "/watch?v=#{ex.message}"
rescue ex
@@ -413,7 +417,7 @@ get "/watch" do |env|
if source == "youtube"
begin
- comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
rescue ex
if preferences.comments[1] == "reddit"
comments, reddit_thread = fetch_reddit_comments(id)
@@ -432,12 +436,12 @@ get "/watch" do |env|
comment_html = replace_links(comment_html)
rescue ex
if preferences.comments[1] == "youtube"
- comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
end
end
else
- comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, proxies, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
comment_html ||= ""
@@ -478,8 +482,6 @@ get "/watch" do |env|
video.description_html = replace_links(video.description_html)
host_url = make_host_url(config, Kemal.config)
- host_params = env.request.query_params
- host_params.delete_all("v")
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
@@ -600,7 +602,7 @@ get "/embed/:id" do |env|
subscriptions ||= [] of String
begin
- video = get_video(id, PG_DB, proxies, region: params.region)
+ video = get_video(id, PG_DB, region: params.region)
rescue ex : VideoRedirect
next env.redirect "/embed/#{ex.message}"
rescue ex
@@ -654,8 +656,6 @@ get "/embed/:id" do |env|
video.description_html = replace_links(video.description_html)
host_url = make_host_url(config, Kemal.config)
- host_params = env.request.query_params
- host_params.delete_all("v")
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
@@ -853,7 +853,7 @@ get "/search" do |env|
next templated "error"
end
- count, videos = search(search_query, page, search_params, proxies, region).as(Tuple)
+ count, videos = search(search_query, page, search_params, region).as(Tuple)
end
templated "search"
@@ -2405,7 +2405,7 @@ get "/feed/trending" do |env|
region ||= "US"
begin
- trending, plid = fetch_trending(trending_type, proxies, region, locale)
+ trending, plid = fetch_trending(trending_type, region, locale)
rescue ex
error_message = "#{ex.message}"
env.response.status_code = 500
@@ -2498,7 +2498,7 @@ get "/feed/channel/:ucid" do |env|
ucid = env.params.url["ucid"]
begin
- author, ucid, auto_generated = get_about_info(ucid, locale)
+ channel = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
@@ -2506,7 +2506,7 @@ get "/feed/channel/:ucid" do |env|
end
client = make_client(YT_URL)
- rss = client.get("/feeds/videos.xml?channel_id=#{ucid}").body
+ rss = client.get("/feeds/videos.xml?channel_id=#{channel.ucid}").body
rss = XML.parse_html(rss)
videos = [] of SearchVideo
@@ -2546,18 +2546,18 @@ get "/feed/channel/:ucid" do |env|
"xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
"xml:lang": "en-US") do
xml.element("link", rel: "self", href: "#{host_url}#{env.request.resource}")
- xml.element("id") { xml.text "yt:channel:#{ucid}" }
- xml.element("yt:channelId") { xml.text ucid }
- xml.element("title") { xml.text author }
- xml.element("link", rel: "alternate", href: "#{host_url}/channel/#{ucid}")
+ xml.element("id") { xml.text "yt:channel:#{channel.ucid}" }
+ xml.element("yt:channelId") { xml.text channel.ucid }
+ xml.element("title") { xml.text channel.author }
+ xml.element("link", rel: "alternate", href: "#{host_url}/channel/#{channel.ucid}")
xml.element("author") do
- xml.element("name") { xml.text author }
- xml.element("uri") { xml.text "#{host_url}/channel/#{ucid}" }
+ xml.element("name") { xml.text channel.author }
+ xml.element("uri") { xml.text "#{host_url}/channel/#{channel.ucid}" }
end
videos.each do |video|
- video.to_xml(host_url, auto_generated, xml)
+ video.to_xml(host_url, channel.auto_generated, xml)
end
end
end
@@ -2719,7 +2719,7 @@ post "/feed/webhook/:token" do |env|
published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
- video = get_video(id, PG_DB, proxies, force_refresh: true)
+ video = get_video(id, PG_DB, force_refresh: true)
# Deliver notifications to `/api/v1/auth/notifications`
payload = {
@@ -2882,22 +2882,18 @@ get "/channel/:ucid" do |env|
sort_by = env.params.query["sort_by"]?.try &.downcase
begin
- author, ucid, auto_generated, sub_count = get_about_info(ucid, locale)
+ channel = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
- if !auto_generated
- env.set "search", "channel:#{ucid} "
- end
-
- if auto_generated
+ if channel.auto_generated
sort_options = {"last", "oldest", "newest"}
sort_by ||= "last"
- items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
+ items, continuation = fetch_channel_playlists(channel.ucid, channel.author, channel.auto_generated, continuation, sort_by)
items.uniq! do |item|
if item.responds_to?(:title)
item.title
@@ -2912,8 +2908,10 @@ get "/channel/:ucid" do |env|
sort_options = {"newest", "oldest", "popular"}
sort_by ||= "newest"
- items, count = get_60_videos(ucid, page, auto_generated, sort_by)
+ items, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
items.select! { |item| !item.paid }
+
+ env.set "search", "channel:#{channel.ucid} "
end
templated "channel"
@@ -2952,18 +2950,18 @@ get "/channel/:ucid/playlists" do |env|
sort_by ||= "last"
begin
- author, ucid, auto_generated, sub_count = get_about_info(ucid, locale)
+ channel = get_about_info(ucid, locale)
rescue ex
error_message = ex.message
env.response.status_code = 500
next templated "error"
end
- if auto_generated
- next env.redirect "/channel/#{ucid}"
+ if channel.auto_generated
+ next env.redirect "/channel/#{channel.ucid}"
end
- items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
+ items, continuation = fetch_channel_playlists(channel.ucid, channel.author, channel.auto_generated, continuation, sort_by)
items.select! { |item| item.is_a?(SearchPlaylist) && !item.videos.empty? }
items = items.map { |item| item.as(SearchPlaylist) }
items.each { |item| item.author = "" }
@@ -3003,7 +3001,7 @@ get "/api/v1/storyboards/:id" do |env|
client = make_client(YT_URL)
begin
- video = get_video(id, PG_DB, proxies, region: region)
+ video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/storyboards/#{ex.message}"
rescue ex
@@ -3088,7 +3086,7 @@ get "/api/v1/captions/:id" do |env|
client = make_client(YT_URL)
begin
- video = get_video(id, PG_DB, proxies, region: region)
+ video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/captions/#{ex.message}"
rescue ex
@@ -3219,7 +3217,7 @@ get "/api/v1/comments/:id" do |env|
sort_by ||= "top"
begin
- comments = fetch_youtube_comments(id, PG_DB, continuation, proxies, format, locale, thin_mode, region, sort_by: sort_by)
+ comments = fetch_youtube_comments(id, PG_DB, continuation, format, locale, thin_mode, region, sort_by: sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
@@ -3276,9 +3274,9 @@ get "/api/v1/insights/:id" do |env|
client = make_client(YT_URL)
headers = HTTP::Headers.new
- html = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1")
+ response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1")
- headers["cookie"] = html.cookies.add_request_headers(headers)["cookie"]
+ headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
@@ -3288,9 +3286,7 @@ get "/api/v1/insights/:id" do |env|
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
- body = html.body
- session_token = body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
-
+ session_token = response.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
post_req = {
session_token: session_token,
}
@@ -3429,7 +3425,7 @@ get "/api/v1/videos/:id" do |env|
region = env.params.query["region"]?
begin
- video = get_video(id, PG_DB, proxies, region: region)
+ video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
next env.redirect "/api/v1/videos/#{ex.message}"
rescue ex
@@ -3450,7 +3446,7 @@ get "/api/v1/trending" do |env|
trending_type = env.params.query["type"]?
begin
- trending, plid = fetch_trending(trending_type, proxies, region, locale)
+ trending, plid = fetch_trending(trending_type, region, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
@@ -3461,9 +3457,9 @@ get "/api/v1/trending" do |env|
json.array do
trending.each do |video|
video.to_json(locale, config, Kemal.config, json)
- end
- end
end
+ end
+ end
videos
end
@@ -3533,7 +3529,7 @@ get "/api/v1/channels/:ucid" do |env|
sort_by ||= "newest"
begin
- author, ucid, auto_generated = get_about_info(ucid, locale)
+ channel = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
@@ -3541,12 +3537,12 @@ get "/api/v1/channels/:ucid" do |env|
end
page = 1
- if auto_generated
+ if channel.auto_generated
videos = [] of SearchVideo
count = 0
else
begin
- videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
+ videos, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
@@ -3554,85 +3550,34 @@ get "/api/v1/channels/:ucid" do |env|
end
end
- client = make_client(YT_URL)
- channel_html = client.get("/channel/#{ucid}/about?disable_polymer=1").body
- channel_html = XML.parse_html(channel_html)
- banner = channel_html.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
- banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
-
- author = channel_html.xpath_node(%q(//a[contains(@class, "branded-page-header-title-link")])).not_nil!.content
- author_url = channel_html.xpath_node(%q(//a[@class="channel-header-profile-image-container spf-link"])).not_nil!["href"]
- author_thumbnail = channel_html.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
- description_html = channel_html.xpath_node(%q(//div[contains(@class,"about-description")])).try &.to_s || ""
-
- paid = channel_html.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
- is_family_friendly = channel_html.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
- allowed_regions = channel_html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
-
- related_channels = channel_html.xpath_nodes(%q(//div[contains(@class, "branded-page-related-channels")]/ul/li))
- related_channels = related_channels.map do |node|
- related_id = node["data-external-id"]?
- related_id ||= ""
-
- anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
- related_title = anchor.try &.["title"]
- related_title ||= ""
-
- related_author_url = anchor.try &.["href"]
- related_author_url ||= ""
-
- related_author_thumbnail = node.xpath_node(%q(.//img)).try &.["data-thumb"]
- related_author_thumbnail ||= ""
-
- {
- id: related_id,
- author: related_title,
- author_url: related_author_url,
- author_thumbnail: related_author_thumbnail,
- }
- end
-
- total_views = 0_i64
- sub_count = 0_i64
- joined = Time.unix(0)
- metadata = channel_html.xpath_nodes(%q(//span[@class="about-stat"]))
- metadata.each do |item|
- case item.content
- when .includes? "views"
- total_views = item.content.gsub(/\D/, "").to_i64
- when .includes? "subscribers"
- sub_count = item.content.delete("subscribers").gsub(/\D/, "").to_i64
- when .includes? "Joined"
- joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
- end
- end
-
- channel_info = JSON.build do |json|
+ JSON.build do |json|
# TODO: Refactor into `to_json` for InvidiousChannel
json.object do
- json.field "author", author
- json.field "authorId", ucid
- json.field "authorUrl", author_url
+ json.field "author", channel.author
+ json.field "authorId", channel.ucid
+ json.field "authorUrl", channel.author_url
json.field "authorBanners" do
json.array do
- qualities = {
- {width: 2560, height: 424},
- {width: 2120, height: 351},
- {width: 1060, height: 175},
- }
- qualities.each do |quality|
- json.object do
- json.field "url", banner.gsub("=w1060", "=w#{quality[:width]}")
- json.field "width", quality[:width]
- json.field "height", quality[:height]
+ if channel.banner
+ qualities = {
+ {width: 2560, height: 424},
+ {width: 2120, height: 351},
+ {width: 1060, height: 175},
+ }
+ qualities.each do |quality|
+ json.object do
+ json.field "url", channel.banner.not_nil!.gsub("=w1060-", "=w#{quality[:width]}-")
+ json.field "width", quality[:width]
+ json.field "height", quality[:height]
+ end
end
- end
- json.object do
- json.field "url", banner.rchop("=w1060-fcrop64=1,00005a57ffffa5a8-nd-c0xffffffff-rj-k-no")
- json.field "width", 512
- json.field "height", 288
+ json.object do
+ json.field "url", channel.banner.not_nil!.split("=w1060-")[0]
+ json.field "width", 512
+ json.field "height", 288
+ end
end
end
end
@@ -3643,7 +3588,7 @@ get "/api/v1/channels/:ucid" do |env|
qualities.each do |quality|
json.object do
- json.field "url", author_thumbnail.gsub("/s100-", "/s#{quality}-")
+ json.field "url", channel.author_thumbnail.gsub("=s100-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
@@ -3651,17 +3596,17 @@ get "/api/v1/channels/:ucid" do |env|
end
end
- json.field "subCount", sub_count
- json.field "totalViews", total_views
- json.field "joined", joined.to_unix
- json.field "paid", paid
+ json.field "subCount", channel.sub_count
+ json.field "totalViews", channel.total_views
+ json.field "joined", channel.joined.to_unix
+ json.field "paid", channel.paid
- json.field "autoGenerated", auto_generated
- json.field "isFamilyFriendly", is_family_friendly
- json.field "description", html_to_content(description_html)
- json.field "descriptionHtml", description_html
+ json.field "autoGenerated", channel.auto_generated
+ json.field "isFamilyFriendly", channel.is_family_friendly
+ json.field "description", html_to_content(channel.description_html)
+ json.field "descriptionHtml", channel.description_html
- json.field "allowedRegions", allowed_regions
+ json.field "allowedRegions", channel.allowed_regions
json.field "latestVideos" do
json.array do
@@ -3673,11 +3618,11 @@ get "/api/v1/channels/:ucid" do |env|
json.field "relatedChannels" do
json.array do
- related_channels.each do |related_channel|
+ channel.related_channels.each do |related_channel|
json.object do
- json.field "author", related_channel[:author]
- json.field "authorId", related_channel[:id]
- json.field "authorUrl", related_channel[:author_url]
+ json.field "author", related_channel.author
+ json.field "authorId", related_channel.ucid
+ json.field "authorUrl", related_channel.author_url
json.field "authorThumbnails" do
json.array do
@@ -3685,7 +3630,7 @@ get "/api/v1/channels/:ucid" do |env|
qualities.each do |quality|
json.object do
- json.field "url", related_channel[:author_thumbnail].gsub("=s48-", "=s#{quality}-")
+ json.field "url", related_channel.author_thumbnail.gsub("=s48-", "=s#{quality}-")
json.field "width", quality
json.field "height", quality
end
@@ -3698,8 +3643,6 @@ get "/api/v1/channels/:ucid" do |env|
end
end
end
-
- channel_info
end
{"/api/v1/channels/:ucid/videos", "/api/v1/channels/videos/:ucid"}.each do |route|
@@ -3716,7 +3659,7 @@ end
sort_by ||= "newest"
begin
- author, ucid, auto_generated = get_about_info(ucid, locale)
+ channel = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
@@ -3724,7 +3667,7 @@ end
end
begin
- videos, count = get_60_videos(ucid, page, auto_generated, sort_by)
+ videos, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
@@ -3735,11 +3678,11 @@ end
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
- end
- end
- end
end
end
+ end
+ end
+end
{"/api/v1/channels/:ucid/latest", "/api/v1/channels/latest/:ucid"}.each do |route|
get route do |env|
@@ -3761,11 +3704,11 @@ end
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
- end
- end
end
end
end
+ end
+end
{"/api/v1/channels/:ucid/playlists", "/api/v1/channels/playlists/:ucid"}.each do |route|
get route do |env|
@@ -3780,16 +3723,16 @@ end
sort_by ||= "last"
begin
- author, ucid, auto_generated = get_about_info(ucid, locale)
+ channel = get_about_info(ucid, locale)
rescue ex
error_message = {"error" => ex.message}.to_json
env.response.status_code = 500
next error_message
end
- items, continuation = fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
+ items, continuation = fetch_channel_playlists(channel.ucid, channel.author, channel.auto_generated, continuation, sort_by)
- response = JSON.build do |json|
+ JSON.build do |json|
json.object do
json.field "playlists" do
json.array do
@@ -3804,8 +3747,28 @@ end
json.field "continuation", continuation
end
end
+ end
+end
+
+{"/api/v1/channels/:ucid/comments", "/api/v1/channels/comments/:ucid"}.each do |route|
+ get route do |env|
+ locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+
+ env.response.content_type = "application/json"
+
+ ucid = env.params.url["ucid"]
- response
+ continuation = env.params.query["continuation"]?
+
+ # sort_by = env.params.query["sort_by"]?.try &.downcase
+
+ begin
+ fetch_channel_community(ucid, continuation, locale, config, Kemal.config)
+ rescue ex
+ env.response.status_code = 400
+ error_message = {"error" => ex.message}.to_json
+ next error_message
+ end
end
end
@@ -3863,14 +3826,11 @@ get "/api/v1/search" do |env|
search_params = produce_search_params(sort_by, date, content_type, duration, features)
rescue ex
env.response.status_code = 400
- next JSON.build do |json|
- json.object do
- json.field "error", ex.message
- end
- end
+ error_message = {"error" => ex.message}.to_json
+ next error_message
end
- count, search_results = search(query, page, search_params, proxies, region).as(Tuple)
+ count, search_results = search(query, page, search_params, region).as(Tuple)
JSON.build do |json|
json.array do
search_results.each do |item|
@@ -3880,6 +3840,42 @@ get "/api/v1/search" do |env|
end
end
+get "/api/v1/search/suggestions" do |env|
+ locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ region = env.params.query["region"]?
+
+ env.response.content_type = "application/json"
+
+ query = env.params.query["q"]?
+ query ||= ""
+
+ begin
+ client = make_client(URI.parse("https://suggestqueries.google.com"))
+ response = client.get("/complete/search?hl=en&gl=#{region}&client=youtube&ds=yt&q=#{URI.escape(query)}&callback=suggestCallback").body
+
+ body = response[35..-2]
+ body = JSON.parse(body).as_a
+ suggestions = body[1].as_a[0..-2]
+
+ JSON.build do |json|
+ json.object do
+ json.field "query", body[0].as_s
+ json.field "suggestions" do
+ json.array do
+ suggestions.each do |suggestion|
+ json.string suggestion[0].as_s
+ end
+ end
+ end
+ end
+ end
+ rescue ex
+ env.response.status_code = 500
+ error_message = {"error" => ex.message}.to_json
+ next error_message
+ end
+end
+
get "/api/v1/playlists/:plid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
@@ -3946,11 +3942,11 @@ get "/api/v1/playlists/:plid" do |env|
json.array do
videos.each do |video|
video.to_json(locale, config, Kemal.config, json)
- end
end
end
end
end
+ end
if format == "html"
response = JSON.parse(response)
@@ -4049,7 +4045,7 @@ get "/api/v1/auth/notifications" do |env|
topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000)
topics ||= [] of String
- create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, connection_channel)
+ create_notification_stream(env, config, Kemal.config, decrypt_function, topics, connection_channel)
end
post "/api/v1/auth/notifications" do |env|
@@ -4058,7 +4054,7 @@ post "/api/v1/auth/notifications" do |env|
topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
topics ||= [] of String
- create_notification_stream(env, proxies, config, Kemal.config, decrypt_function, topics, connection_channel)
+ create_notification_stream(env, config, Kemal.config, decrypt_function, topics, connection_channel)
end
get "/api/v1/auth/preferences" do |env|
@@ -4303,7 +4299,7 @@ get "/api/manifest/dash/id/:id" do |env|
client = make_client(YT_URL)
begin
- video = get_video(id, PG_DB, proxies, region: region)
+ video = get_video(id, PG_DB, region: region)
rescue ex : VideoRedirect
url = "/api/manifest/dash/id/#{ex.message}"
if env.params.query
@@ -4493,7 +4489,7 @@ get "/latest_version" do |env|
next
end
- video = get_video(id, PG_DB, proxies, region: region)
+ video = get_video(id, PG_DB, region: region)
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
@@ -4600,7 +4596,7 @@ get "/videoplayback" do |env|
url = "/videoplayback?#{query_params.to_s}"
headers = HTTP::Headers.new
- {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
+ REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
@@ -4609,7 +4605,7 @@ get "/videoplayback" do |env|
response = HTTP::Client::Response.new(403)
5.times do
begin
- client = make_client(URI.parse(host), proxies, region)
+ client = make_client(URI.parse(host), region)
response = client.head(url, headers)
break
rescue Socket::Addrinfo::Error
@@ -4643,13 +4639,13 @@ get "/videoplayback" do |env|
next
end
- client = make_client(URI.parse(host), proxies, region)
+ client = make_client(URI.parse(host), region)
begin
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
- if !{"Access-Control-Allow-Origin", "Alt-Svc", "Server"}.includes? key
+ if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
@@ -4691,7 +4687,7 @@ get "/ggpht/*" do |env|
url = env.request.path.lchop("/ggpht")
headers = HTTP::Headers.new
- {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
+ REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
@@ -4699,18 +4695,20 @@ get "/ggpht/*" do |env|
begin
client.get(url, headers) do |response|
+ env.response.status_code = response.status_code
response.headers.each do |key, value|
- if !{"Access-Control-Allow-Origin", "Alt-Svc", "Server"}.includes? key
+ if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
- if response.status_code == 304
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if response.status_code >= 300
+ env.response.headers.delete("Transfer-Encoding")
break
end
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
proxy_file(response, env)
end
rescue ex
@@ -4739,7 +4737,7 @@ get "/sb/:id/:storyboard/:index" do |env|
url = "/sb/#{id}/#{storyboard}/#{index}?#{env.params.query}"
headers = HTTP::Headers.new
- {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
+ REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
@@ -4749,17 +4747,18 @@ get "/sb/:id/:storyboard/:index" do |env|
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
- if !{"Access-Control-Allow-Origin", "Alt-Svc", "Server"}.includes? key
+ if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
- if response.status_code >= 400
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if response.status_code >= 300
+ env.response.headers.delete("Transfer-Encoding")
break
end
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
proxy_file(response, env)
end
rescue ex
@@ -4784,7 +4783,7 @@ get "/vi/:id/:name" do |env|
url = "/vi/#{id}/#{name}"
headers = HTTP::Headers.new
- {"Accept", "Accept-Encoding", "Cache-Control", "Connection", "If-None-Match", "Range"}.each do |header|
+ REQUEST_HEADERS_WHITELIST.each do |header|
if env.request.headers[header]?
headers[header] = env.request.headers[header]
end
@@ -4794,17 +4793,18 @@ get "/vi/:id/:name" do |env|
client.get(url, headers) do |response|
env.response.status_code = response.status_code
response.headers.each do |key, value|
- if !{"Access-Control-Allow-Origin", "Alt-Svc", "Server"}.includes? key
+ if !RESPONSE_HEADERS_BLACKLIST.includes? key
env.response.headers[key] = value
end
end
- if response.status_code == 304
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if response.status_code >= 300 && response.status_code != 404
+ env.response.headers.delete("Transfer-Encoding")
break
end
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
proxy_file(response, env)
end
rescue ex
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 0582eb2f..d7f68b11 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -97,6 +97,35 @@ struct ChannelVideo
})
end
+struct AboutRelatedChannel
+ db_mapping({
+ ucid: String,
+ author: String,
+ author_url: String,
+ author_thumbnail: String,
+ })
+end
+
+# TODO: Refactor into either SearchChannel or InvidiousChannel
+struct AboutChannel
+ db_mapping({
+ ucid: String,
+ author: String,
+ auto_generated: Bool,
+ author_url: String,
+ author_thumbnail: String,
+ banner: String?,
+ description_html: String,
+ paid: Bool,
+ total_views: Int64,
+ sub_count: Int64,
+ joined: Time,
+ is_family_friendly: Bool,
+ allowed_regions: Array(String),
+ related_channels: Array(AboutRelatedChannel),
+ })
+end
+
def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
finished_channel = Channel(String | Nil).new
@@ -587,6 +616,244 @@ def extract_channel_playlists_cursor(url, auto_generated)
return cursor
end
+# TODO: Add "sort_by"
+def fetch_channel_community(ucid, continuation, locale, config, kemal_config)
+ client = make_client(YT_URL)
+ headers = HTTP::Headers.new
+ headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
+
+ response = client.get("/channel/#{ucid}/community?gl=US&hl=en", headers)
+ if response.status_code == 404
+ response = client.get("/user/#{ucid}/community?gl=US&hl=en", headers)
+ end
+
+ if response.status_code == 404
+ error_message = translate(locale, "This channel does not exist.")
+ raise error_message
+ end
+
+ if !continuation || continuation.empty?
+ response = JSON.parse(response.body.match(/window\["ytInitialData"\] = (?<info>.*?);\n/).try &.["info"] || "{}")
+ ucid = response["responseContext"]["serviceTrackingParams"]
+ .as_a.select { |service| service["service"] == "GFEEDBACK" }[0]?.try &.["params"]
+ .as_a.select { |param| param["key"] == "browse_id" }[0]?.try &.["value"].as_s
+ body = response["contents"]?.try &.["twoColumnBrowseResultsRenderer"]["tabs"].as_a.select { |tab| tab["tabRenderer"]?.try &.["selected"].as_bool.== true }[0]?
+
+ if !body
+ raise "Could not extract community tab."
+ end
+
+ body = body["tabRenderer"]["content"]["sectionListRenderer"]["contents"][0]["itemSectionRenderer"]
+ else
+ headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
+ headers["content-type"] = "application/x-www-form-urlencoded"
+
+ headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
+ headers["x-spf-previous"] = ""
+ headers["x-spf-referer"] = ""
+
+ headers["x-youtube-client-name"] = "1"
+ headers["x-youtube-client-version"] = "2.20180719"
+
+ session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
+ post_req = {
+ session_token: session_token,
+ }
+
+ response = client.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, form: post_req)
+ body = JSON.parse(response.body)
+
+ ucid = body["response"]["responseContext"]["serviceTrackingParams"]
+ .as_a.select { |service| service["service"] == "GFEEDBACK" }[0]?.try &.["params"]
+ .as_a.select { |param| param["key"] == "browse_id" }[0]?.try &.["value"].as_s
+
+ body = body["response"]["continuationContents"]["itemSectionContinuation"]? ||
+ body["response"]["continuationContents"]["backstageCommentsContinuation"]?
+
+ if !body
+ raise "Could not extract continuation."
+ end
+ end
+
+ continuation = body["continuations"]?.try &.[0]["nextContinuationData"]["continuation"].as_s
+ posts = body["contents"].as_a
+
+ if message = posts[0]["messageRenderer"]?
+ error_message = (message["text"]["simpleText"]? ||
+ message["text"]["runs"]?.try &.[0]?.try &.["text"]?)
+ .try &.as_s || ""
+ raise error_message
+ end
+
+ JSON.build do |json|
+ json.object do
+ json.field "authorId", ucid
+ json.field "comments" do
+ json.array do
+ posts.each do |post|
+ comments = post["backstagePostThreadRenderer"]?.try &.["comments"]? ||
+ post["backstageCommentsContinuation"]?
+
+ post = post["backstagePostThreadRenderer"]?.try &.["post"]["backstagePostRenderer"]? ||
+ post["commentThreadRenderer"]?.try &.["comment"]["commentRenderer"]?
+
+ if !post
+ next
+ end
+
+ if !post["contentText"]?
+ content_html = ""
+ else
+ content_html = post["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |block| HTML.escape(block) }.to_s ||
+ content_to_comment_html(post["contentText"]["runs"].as_a).try &.to_s || ""
+ end
+
+ author = post["authorText"]?.try &.["simpleText"]? || ""
+
+ json.object do
+ json.field "author", author
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+ author_thumbnail = post["authorThumbnail"]["thumbnails"].as_a[0]["url"].as_s
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", author_thumbnail.gsub(/s\d+-/, "s#{quality}-")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+
+ if post["authorEndpoint"]?
+ json.field "authorId", post["authorEndpoint"]["browseEndpoint"]["browseId"]
+ json.field "authorUrl", post["authorEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
+ else
+ json.field "authorId", ""
+ json.field "authorUrl", ""
+ end
+
+ published_text = post["publishedTimeText"]["runs"][0]["text"].as_s
+ published = decode_date(published_text.rchop(" (edited)"))
+
+ if published_text.includes?(" (edited)")
+ json.field "isEdited", true
+ else
+ json.field "isEdited", false
+ end
+
+ like_count = post["actionButtons"]["commentActionButtonsRenderer"]["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"]
+ .try &.as_s.gsub(/\D/, "").to_i? || 0
+
+ json.field "content", html_to_content(content_html)
+ json.field "contentHtml", content_html
+
+ json.field "published", published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
+
+ json.field "likeCount", like_count
+ json.field "commentId", post["postId"]? || post["commentId"]? || ""
+
+ if attachment = post["backstageAttachment"]?
+ json.field "attachment" do
+ json.object do
+ case attachment.as_h
+ when .has_key?("videoRenderer")
+ attachment = attachment["videoRenderer"]
+ json.field "type", "video"
+
+ if !attachment["videoId"]?
+ error_message = (attachment["title"]["simpleText"]? ||
+ attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?)
+
+ json.field "error", error_message
+ else
+ video_id = attachment["videoId"].as_s
+
+ json.field "title", attachment["title"]["simpleText"].as_s
+ json.field "videoId", video_id
+ json.field "videoThumbnails" do
+ generate_thumbnails(json, video_id, config, kemal_config)
+ end
+
+ json.field "lengthSeconds", decode_length_seconds(attachment["lengthText"]["simpleText"].as_s)
+
+ author_info = attachment["ownerText"]["runs"][0].as_h
+
+ json.field "author", author_info["text"].as_s
+ json.field "authorId", author_info["navigationEndpoint"]["browseEndpoint"]["browseId"]
+ json.field "authorUrl", author_info["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
+
+ # TODO: json.field "authorThumbnails", "channelThumbnailSupportedRenderers"
+ # TODO: json.field "authorVerified", "ownerBadges"
+
+ published = decode_date(attachment["publishedTimeText"]["simpleText"].as_s)
+
+ json.field "published", published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
+
+ view_count = attachment["viewCountText"]["simpleText"].as_s.gsub(/\D/, "").to_i64? || 0_i64
+
+ json.field "viewCount", view_count
+ json.field "viewCountText", translate(locale, "`x` views", number_to_short_text(view_count))
+ end
+ when .has_key?("backstageImageRenderer")
+ attachment = attachment["backstageImageRenderer"]
+ json.field "type", "image"
+
+ json.field "imageThumbnails" do
+ json.array do
+ thumbnail = attachment["image"]["thumbnails"][0].as_h
+ width = thumbnail["width"].as_i
+ height = thumbnail["height"].as_i
+ aspect_ratio = (width.to_f / height.to_f)
+
+ qualities = {320, 560, 640, 1280, 2000}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", thumbnail["url"].as_s.gsub("=s640-", "=s#{quality}-")
+ json.field "width", quality
+ json.field "height", (quality / aspect_ratio).ceil.to_i
+ end
+ end
+ end
+ end
+ else
+ # TODO
+ end
+ end
+ end
+ end
+
+ if comments && (reply_count = (comments["backstageCommentsRenderer"]["moreText"]["simpleText"]? ||
+ comments["backstageCommentsRenderer"]["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
+ .try &.as_s.gsub(/\D/, "").to_i?)
+ continuation = comments["backstageCommentsRenderer"]["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
+ continuation ||= ""
+
+ json.field "replies" do
+ json.object do
+ json.field "replyCount", reply_count
+ json.field "continuation", continuation
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ if body["continuations"]?
+ continuation = body["continuations"][0]["nextContinuationData"]["continuation"]
+ json.field "continuation", continuation
+ end
+ end
+ end
+end
+
def get_about_info(ucid, locale)
client = make_client(YT_URL)
@@ -599,14 +866,12 @@ def get_about_info(ucid, locale)
if about.xpath_node(%q(//div[contains(@class, "channel-empty-message")]))
error_message = translate(locale, "This channel does not exist.")
-
raise error_message
end
if about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).try &.content.empty?
error_message = about.xpath_node(%q(//div[@class="yt-alert-content"])).try &.content.strip
error_message ||= translate(locale, "Could not get channel info.")
-
raise error_message
end
@@ -617,8 +882,63 @@ def get_about_info(ucid, locale)
sub_count ||= 0
author = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!.content
+ author_url = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!["href"]
+ author_thumbnail = about.xpath_node(%q(//img[@class="channel-header-profile-image"])).not_nil!["src"]
+
ucid = about.xpath_node(%q(//meta[@itemprop="channelId"])).not_nil!["content"]
+ banner = about.xpath_node(%q(//div[@id="gh-banner"]/style)).not_nil!.content
+ banner = "https:" + banner.match(/background-image: url\((?<url>[^)]+)\)/).not_nil!["url"]
+
+ if banner.includes? "channels/c4/default_banner"
+ banner = nil
+ end
+
+ description_html = about.xpath_node(%q(//div[contains(@class,"about-description")])).try &.to_s || ""
+
+ paid = about.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
+ is_family_friendly = about.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
+ allowed_regions = about.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
+
+ related_channels = about.xpath_nodes(%q(//div[contains(@class, "branded-page-related-channels")]/ul/li))
+ related_channels = related_channels.map do |node|
+ related_id = node["data-external-id"]?
+ related_id ||= ""
+
+ anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
+ related_title = anchor.try &.["title"]
+ related_title ||= ""
+
+ related_author_url = anchor.try &.["href"]
+ related_author_url ||= ""
+
+ related_author_thumbnail = node.xpath_node(%q(.//img)).try &.["data-thumb"]
+ related_author_thumbnail ||= ""
+
+ AboutRelatedChannel.new(
+ ucid: related_id,
+ author: related_title,
+ author_url: related_author_url,
+ author_thumbnail: related_author_thumbnail,
+ )
+ end
+
+ total_views = 0_i64
+ sub_count = 0_i64
+
+ joined = Time.unix(0)
+ metadata = about.xpath_nodes(%q(//span[@class="about-stat"]))
+ metadata.each do |item|
+ case item.content
+ when .includes? "views"
+ total_views = item.content.gsub(/\D/, "").to_i64
+ when .includes? "subscribers"
+ sub_count = item.content.delete("subscribers").gsub(/\D/, "").to_i64
+ when .includes? "Joined"
+ joined = Time.parse(item.content.lchop("Joined "), "%b %-d, %Y", Time::Location.local)
+ end
+ end
+
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
auto_generated = false
@@ -627,10 +947,25 @@ def get_about_info(ucid, locale)
auto_generated = true
end
- return {author, ucid, auto_generated, sub_count}
+ return AboutChannel.new(
+ ucid: ucid,
+ author: author,
+ auto_generated: auto_generated,
+ author_url: author_url,
+ author_thumbnail: author_thumbnail,
+ banner: banner,
+ description_html: description_html,
+ paid: paid,
+ total_views: total_views,
+ sub_count: sub_count,
+ joined: joined,
+ is_family_friendly: is_family_friendly,
+ allowed_regions: allowed_regions,
+ related_channels: related_channels
+ )
end
-def get_60_videos(ucid, page, auto_generated, sort_by = "newest")
+def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
count = 0
videos = [] of SearchVideo
@@ -652,7 +987,7 @@ def get_60_videos(ucid, page, auto_generated, sort_by = "newest")
if auto_generated
videos += extract_videos(nodeset)
else
- videos += extract_videos(nodeset, ucid)
+ videos += extract_videos(nodeset, ucid, author)
end
else
break
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
index 7f593760..e2de8714 100644
--- a/src/invidious/comments.cr
+++ b/src/invidious/comments.cr
@@ -22,6 +22,7 @@ class RedditComment
replies: RedditThing | String,
score: Int32,
depth: Int32,
+ permalink: String,
created_utc: {
type: Time,
converter: RedditComment::TimeConverter,
@@ -56,14 +57,14 @@ class RedditListing
})
end
-def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_mode, region, sort_by = "top")
- video = get_video(id, db, proxies, region: region)
+def fetch_youtube_comments(id, db, continuation, format, locale, thin_mode, region, sort_by = "top")
+ video = get_video(id, db, region: region)
session_token = video.info["session_token"]?
ctoken = produce_comment_continuation(id, cursor: "", sort_by: sort_by)
continuation ||= ctoken
- if !continuation || !session_token
+ if !continuation || continuation.empty? || !session_token
if format == "json"
return {"comments" => [] of String}.to_json
else
@@ -75,7 +76,7 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
session_token: session_token,
}
- client = make_client(YT_URL, proxies, video.info["region"]?)
+ client = make_client(YT_URL, video.info["region"]?)
headers = HTTP::Headers.new
headers["content-type"] = "application/x-www-form-urlencoded"
@@ -177,8 +178,10 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
json.field "content", html_to_content(content_html)
json.field "contentHtml", content_html
+
json.field "published", published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
+
json.field "likeCount", node_comment["likeCount"]
json.field "commentId", node_comment["commentId"]
json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]
@@ -273,56 +276,55 @@ def fetch_reddit_comments(id, sort_by = "confidence")
end
def template_youtube_comments(comments, locale, thin_mode)
- html = ""
-
- root = comments["comments"].as_a
- root.each do |child|
- if child["replies"]?
- replies_html = <<-END_HTML
- <div id="replies" class="pure-g">
- <div class="pure-u-1-24"></div>
- <div class="pure-u-23-24">
- <p>
- <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
- onclick="get_youtube_replies(this)">#{translate(locale, "View `x` replies", child["replies"]["replyCount"].to_s)}</a>
- </p>
+ String.build do |html|
+ root = comments["comments"].as_a
+ root.each do |child|
+ if child["replies"]?
+ replies_html = <<-END_HTML
+ <div id="replies" class="pure-g">
+ <div class="pure-u-1-24"></div>
+ <div class="pure-u-23-24">
+ <p>
+ <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
+ onclick="get_youtube_replies(this)">#{translate(locale, "View `x` replies", child["replies"]["replyCount"].to_s)}</a>
+ </p>
+ </div>
</div>
- </div>
- END_HTML
- end
-
- if !thin_mode
- author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).full_path}"
- else
- author_thumbnail = ""
- end
+ END_HTML
+ end
- html += <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-4-24 pure-u-md-2-24">
- <img style="width:90%;padding-right:1em;padding-top:1em" src="#{author_thumbnail}">
- </div>
- <div class="pure-u-20-24 pure-u-md-22-24">
- <p>
- <b>
- <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{child["author"]}</a>
- </b>
- <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
- <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
- |
- <a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
- |
- <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
- END_HTML
-
- if child["creatorHeart"]?
if !thin_mode
- creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).full_path}"
+ author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).full_path}"
else
- creator_thumbnail = ""
+ author_thumbnail = ""
end
- html += <<-END_HTML
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="channel-profile pure-u-4-24 pure-u-md-2-24">
+ <img style="padding-right:1em;padding-top:1em" src="#{author_thumbnail}">
+ </div>
+ <div class="pure-u-20-24 pure-u-md-22-24">
+ <p>
+ <b>
+ <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{child["author"]}</a>
+ </b>
+ <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
+ <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
+ |
+ <a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
+ |
+ <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
+ END_HTML
+
+ if child["creatorHeart"]?
+ if !thin_mode
+ creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).full_path}"
+ else
+ creator_thumbnail = ""
+ end
+
+ html << <<-END_HTML
<span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a ❤", child["creatorHeart"]["creatorName"].as_s)}">
<div class="creator-heart">
<img class="creator-heart-background-hearted" src="#{creator_thumbnail}"></img>
@@ -331,84 +333,77 @@ def template_youtube_comments(comments, locale, thin_mode)
</div>
</div>
</span>
- END_HTML
- end
+ END_HTML
+ end
- html += <<-END_HTML
- </p>
- #{replies_html}
+ html << <<-END_HTML
+ </p>
+ #{replies_html}
+ </div>
</div>
- </div>
- END_HTML
- end
+ END_HTML
+ end
- if comments["continuation"]?
- html += <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1">
- <p>
- <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
- onclick="get_youtube_replies(this, true)">#{translate(locale, "Load more")}</a>
- </p>
+ if comments["continuation"]?
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1">
+ <p>
+ <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
+ onclick="get_youtube_replies(this, true)">#{translate(locale, "Load more")}</a>
+ </p>
+ </div>
</div>
- </div>
- END_HTML
+ END_HTML
+ end
end
-
- return html
end
def template_reddit_comments(root, locale)
- html = ""
- root.each do |child|
- if child.data.is_a?(RedditComment)
- child = child.data.as(RedditComment)
- author = child.author
- score = child.score
- body_html = HTML.unescape(child.body_html)
-
- replies_html = ""
- if child.replies.is_a?(RedditThing)
- replies = child.replies.as(RedditThing)
- replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
- end
-
- content = <<-END_HTML
- <p>
- <a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
- <b><a href="https://www.reddit.com/user/#{author}">#{author}</a></b>
- #{translate(locale, "`x` points", number_with_separator(score))}
- #{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}
- </p>
- <div>
- #{body_html}
- #{replies_html}
- </div>
- END_HTML
+ String.build do |html|
+ root.each do |child|
+ if child.data.is_a?(RedditComment)
+ child = child.data.as(RedditComment)
+ body_html = HTML.unescape(child.body_html)
+
+ replies_html = ""
+ if child.replies.is_a?(RedditThing)
+ replies = child.replies.as(RedditThing)
+ replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
+ end
- if child.depth > 0
- html += <<-END_HTML
+ if child.depth > 0
+ html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1-24">
</div>
<div class="pure-u-23-24">
- #{content}
- </div>
- </div>
- END_HTML
- else
- html += <<-END_HTML
+ END_HTML
+ else
+ html << <<-END_HTML
<div class="pure-g">
<div class="pure-u-1">
- #{content}
- </div>
- </div>
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ <p>
+ <a href="javascript:void(0)" onclick="toggle_parent(this)">[ - ]</a>
+ <b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
+ #{translate(locale, "`x` points", number_with_separator(child.score))}
+ <span title="#{child.created_utc.to_s(translate(locale, "%a %B %-d %T %Y UTC"))}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
+ <a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
+ </p>
+ <div>
+ #{body_html}
+ #{replies_html}
+ </div>
+ </div>
+ </div>
END_HTML
end
end
end
-
- return html
end
def replace_links(html)
@@ -508,7 +503,7 @@ def content_to_comment_html(content)
end
text
- end.join.rchop('\ufeff')
+ end.join("").delete('\ufeff')
return comment_html
end
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index aedd3053..039ac55b 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -664,7 +664,7 @@ def copy_in_chunks(input, output, chunk_size = 4096)
end
end
-def create_notification_stream(env, proxies, config, kemal_config, decrypt_function, topics, connection_channel)
+def create_notification_stream(env, config, kemal_config, decrypt_function, topics, connection_channel)
connection = Channel(PQ::Notification).new(8)
connection_channel.send({true, connection})
@@ -682,7 +682,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
published = Time.utc - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
video_id = TEST_IDS[rand(TEST_IDS.size)]
- video = get_video(video_id, PG_DB, proxies)
+ video = get_video(video_id, PG_DB)
video.published = published
response = JSON.parse(video.to_json(locale, config, kemal_config, decrypt_function))
@@ -758,7 +758,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
next
end
- video = get_video(video_id, PG_DB, proxies)
+ video = get_video(video_id, PG_DB)
video.published = Time.unix(published)
response = JSON.parse(video.to_json(locale, config, Kemal.config, decrypt_function))
diff --git a/src/invidious/helpers/static_file_handler.cr b/src/invidious/helpers/static_file_handler.cr
new file mode 100644
index 00000000..87edbcd3
--- /dev/null
+++ b/src/invidious/helpers/static_file_handler.cr
@@ -0,0 +1,194 @@
+# Since systems have a limit on number of open files (`ulimit -a`),
+# we serve them from memory to avoid 'Too many open files' without needing
+# to modify ulimit.
+#
+# Heavily adapted from:
+# https://github.com/kemalcr/kemal/blob/master/src/kemal/helpers/helpers.cr
+# https://github.com/kemalcr/kemal/blob/master/src/kemal/static_file_handler.cr
+#
+# Changes:
+# - A `send_file` overload is added which supports sending a Slice, file_path, filestat
+# - `StaticFileHandler` is patched to cache to and serve from @cached_files
+
+private def multipart(file, env : HTTP::Server::Context)
+ # See http://httpwg.org/specs/rfc7233.html
+ fileb = file.size
+ startb = endb = 0
+
+ if match = env.request.headers["Range"].match /bytes=(\d{1,})-(\d{0,})/
+ startb = match[1].to_i { 0 } if match.size >= 2
+ endb = match[2].to_i { 0 } if match.size >= 3
+ end
+
+ endb = fileb - 1 if endb == 0
+
+ if startb < endb < fileb
+ content_length = 1 + endb - startb
+ env.response.status_code = 206
+ env.response.content_length = content_length
+ env.response.headers["Accept-Ranges"] = "bytes"
+ env.response.headers["Content-Range"] = "bytes #{startb}-#{endb}/#{fileb}" # MUST
+
+ if startb > 1024
+ skipped = 0
+ # file.skip only accepts values less than or equal to 1024 (buffer size, undocumented)
+ until (increase_skipped = skipped + 1024) > startb
+ file.skip(1024)
+ skipped = increase_skipped
+ end
+ if (skipped_minus_startb = skipped - startb) > 0
+ file.skip skipped_minus_startb
+ end
+ else
+ file.skip(startb)
+ end
+
+ IO.copy(file, env.response, content_length)
+ else
+ env.response.content_length = fileb
+ env.response.status_code = 200 # Range not satisfiable, see 4.4 Note
+ IO.copy(file, env.response)
+ end
+end
+
+# Set the Content-Disposition to "attachment" with the specified filename,
+# instructing the user agents to prompt to save.
+private def attachment(env : HTTP::Server::Context, filename : String? = nil, disposition : String? = nil)
+ disposition = "attachment" if disposition.nil? && filename
+ if disposition && filename
+ env.response.headers["Content-Disposition"] = "#{disposition}; filename=\"#{File.basename(filename)}\""
+ end
+end
+
+def send_file(env : HTTP::Server::Context, file_path : String, data : Slice(UInt8), filestat : File::Info, filename : String? = nil, disposition : String? = nil)
+ config = Kemal.config.serve_static
+ mime_type = MIME.from_filename(file_path, "application/octet-stream")
+ env.response.content_type = mime_type
+ env.response.headers["Accept-Ranges"] = "bytes"
+ env.response.headers["X-Content-Type-Options"] = "nosniff"
+ minsize = 860 # http://webmasters.stackexchange.com/questions/31750/what-is-recommended-minimum-object-size-for-gzip-performance-benefits ??
+ request_headers = env.request.headers
+ filesize = data.bytesize
+ attachment(env, filename, disposition)
+
+ Kemal.config.static_headers.try(&.call(env.response, file_path, filestat))
+
+ file = IO::Memory.new(data)
+ if env.request.method == "GET" && env.request.headers.has_key?("Range")
+ return multipart(file, env)
+ end
+
+ condition = config.is_a?(Hash) && config["gzip"]? == true && filesize > minsize && Kemal::Utils.zip_types(file_path)
+ if condition && request_headers.includes_word?("Accept-Encoding", "gzip")
+ env.response.headers["Content-Encoding"] = "gzip"
+ Gzip::Writer.open(env.response) do |deflate|
+ IO.copy(file, deflate)
+ end
+ elsif condition && request_headers.includes_word?("Accept-Encoding", "deflate")
+ env.response.headers["Content-Encoding"] = "deflate"
+ Flate::Writer.open(env.response) do |deflate|
+ IO.copy(file, deflate)
+ end
+ else
+ env.response.content_length = filesize
+ IO.copy(file, env.response)
+ end
+
+ return
+end
+
+module Kemal
+ class StaticFileHandler < HTTP::StaticFileHandler
+ CACHE_LIMIT = 5_000_000 # 5MB
+ @cached_files = {} of String => {data: Bytes, filestat: File::Info}
+
+ def call(context : HTTP::Server::Context)
+ return call_next(context) if context.request.path.not_nil! == "/"
+
+ case context.request.method
+ when "GET", "HEAD"
+ else
+ if @fallthrough
+ call_next(context)
+ else
+ context.response.status_code = 405
+ context.response.headers.add("Allow", "GET, HEAD")
+ end
+ return
+ end
+
+ config = Kemal.config.serve_static
+ original_path = context.request.path.not_nil!
+ request_path = URI.unescape(original_path)
+
+ # File path cannot contain '\0' (NUL) because no filesystem I know of
+ # accepts the '\0' character in a file name.
+ if request_path.includes? '\0'
+ context.response.status_code = 400
+ return
+ end
+
+ expanded_path = File.expand_path(request_path, "/")
+ is_dir_path = if original_path.ends_with?('/') && !expanded_path.ends_with? '/'
+ expanded_path = expanded_path + '/'
+ true
+ else
+ expanded_path.ends_with? '/'
+ end
+
+ file_path = File.join(@public_dir, expanded_path)
+
+ if file = @cached_files[file_path]?
+ last_modified = file[:filestat].modification_time
+ add_cache_headers(context.response.headers, last_modified)
+
+ if cache_request?(context, last_modified)
+ context.response.status_code = 304
+ return
+ end
+
+ send_file(context, file_path, file[:data], file[:filestat])
+ else
+ is_dir = Dir.exists? file_path
+
+ if request_path != expanded_path
+ redirect_to context, expanded_path
+ elsif is_dir && !is_dir_path
+ redirect_to context, expanded_path + '/'
+ end
+
+ if Dir.exists?(file_path)
+ if config.is_a?(Hash) && config["dir_listing"] == true
+ context.response.content_type = "text/html"
+ directory_listing(context.response, request_path, file_path)
+ else
+ call_next(context)
+ end
+ elsif File.exists?(file_path)
+ last_modified = modification_time(file_path)
+ add_cache_headers(context.response.headers, last_modified)
+
+ if cache_request?(context, last_modified)
+ context.response.status_code = 304
+ return
+ end
+
+ if @cached_files.sum { |element| element[1][:data].bytesize } + (size = File.size(file_path)) < CACHE_LIMIT
+ data = Bytes.new(size)
+ File.open(file_path) do |file|
+ file.read(data)
+ end
+ filestat = File.info(file_path)
+
+ @cached_files[file_path] = {data: data, filestat: filestat}
+ send_file(context, file_path, data, filestat)
+ else
+ send_file(context, file_path)
+ end
+ else
+ call_next(context)
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index bad2e3a3..1eb02b89 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -18,24 +18,13 @@ def elapsed_text(elapsed)
"#{(millis * 1000).round(2)}µs"
end
-def make_client(url : URI, proxies = {} of String => Array({ip: String, port: Int32}), region = nil)
- context = nil
-
- if url.scheme == "https"
- context = OpenSSL::SSL::Context::Client.new
- context.add_options(
- OpenSSL::SSL::Options::ALL |
- OpenSSL::SSL::Options::NO_SSL_V2 |
- OpenSSL::SSL::Options::NO_SSL_V3
- )
- end
-
- client = HTTPClient.new(url, context)
- client.read_timeout = 10.seconds
- client.connect_timeout = 10.seconds
+def make_client(url : URI, region = nil)
+ client = HTTPClient.new(url)
+ client.read_timeout = 15.seconds
+ client.connect_timeout = 15.seconds
if region
- proxies[region]?.try &.sample(40).each do |proxy|
+ PROXY_LIST[region]?.try &.sample(40).each do |proxy|
begin
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
client.set_proxy(proxy)
diff --git a/src/invidious/search.cr b/src/invidious/search.cr
index c69f96cf..ebeb2236 100644
--- a/src/invidious/search.cr
+++ b/src/invidious/search.cr
@@ -256,8 +256,8 @@ def channel_search(query, page, channel)
return count, items
end
-def search(query, page = 1, search_params = produce_search_params(content_type: "all"), proxies = nil, region = nil)
- client = make_client(YT_URL, proxies, region)
+def search(query, page = 1, search_params = produce_search_params(content_type: "all"), region = nil)
+ client = make_client(YT_URL, region)
if query.empty?
return {0, [] of SearchItem}
end
diff --git a/src/invidious/trending.cr b/src/invidious/trending.cr
index 8e55f207..5f9d7920 100644
--- a/src/invidious/trending.cr
+++ b/src/invidious/trending.cr
@@ -1,4 +1,4 @@
-def fetch_trending(trending_type, proxies, region, locale)
+def fetch_trending(trending_type, region, locale)
client = make_client(YT_URL)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr
index f1b8e3f8..b5803e8a 100644
--- a/src/invidious/videos.cr
+++ b/src/invidious/videos.cr
@@ -869,7 +869,7 @@ end
class VideoRedirect < Exception
end
-def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32}), refresh = true, region = nil, force_refresh = false)
+def get_video(id, db, refresh = true, region = nil, force_refresh = false)
if (video = db.query_one?("SELECT * FROM videos WHERE id = $1", id, as: Video)) && !region
# If record was last updated over 10 minutes ago, or video has since premiered,
# refresh (expire param in response lasts for 6 hours)
@@ -878,7 +878,7 @@ def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32})
(video.premiere_timestamp && video.premiere_timestamp.as(Time) < Time.utc)) ||
force_refresh
begin
- video = fetch_video(id, proxies, region)
+ video = fetch_video(id, region)
video_array = video.to_a
args = arg_array(video_array[1..-1], 2)
@@ -893,7 +893,7 @@ def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32})
end
end
else
- video = fetch_video(id, proxies, region)
+ video = fetch_video(id, region)
video_array = video.to_a
args = arg_array(video_array)
@@ -1097,8 +1097,8 @@ def extract_player_config(body, html)
return params
end
-def fetch_video(id, proxies, region)
- client = make_client(YT_URL, proxies, region)
+def fetch_video(id, region)
+ client = make_client(YT_URL, region)
response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
if md = response.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
@@ -1113,9 +1113,9 @@ def fetch_video(id, proxies, region)
if info["reason"]? && info["reason"].includes? "your country"
bypass_channel = Channel({XML::Node, HTTP::Params} | Nil).new
- proxies.each do |proxy_region, list|
+ PROXY_LIST.each do |proxy_region, list|
spawn do
- client = make_client(YT_URL, proxies, proxy_region)
+ client = make_client(YT_URL, proxy_region)
proxy_response = client.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999")
proxy_html = XML.parse_html(proxy_response.body)
@@ -1131,7 +1131,7 @@ def fetch_video(id, proxies, region)
end
end
- proxies.size.times do
+ PROXY_LIST.size.times do
response = bypass_channel.receive
if response
html, info = response
@@ -1218,7 +1218,7 @@ def fetch_video(id, proxies, region)
license = html.xpath_node(%q(//h4[contains(text(),"License")]/parent::*/ul/li)).try &.content || ""
sub_count_text = html.xpath_node(%q(//span[contains(@class, "yt-subscriber-count")])).try &.["title"]? || "0"
- author_thumbnail = html.xpath_node(%(//span[@class="yt-thumb-clip"]/img)).try &.["data-thumb"]? || ""
+ author_thumbnail = html.xpath_node(%(//span[@class="yt-thumb-clip"]/img)).try &.["data-thumb"]?.try &.gsub(/^\/\//, "https://") || ""
video = Video.new(id, info, Time.utc, title, views, likes, dislikes, wilson_score, published, description_html,
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license, sub_count_text, author_thumbnail)
diff --git a/src/invidious/views/channel.ecr b/src/invidious/views/channel.ecr
index 089b42e3..88be697a 100644
--- a/src/invidious/views/channel.ecr
+++ b/src/invidious/views/channel.ecr
@@ -1,37 +1,52 @@
<% content_for "header" do %>
-<title><%= author %> - Invidious</title>
-<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/channel/<%= ucid %>" />
+<title><%= channel.author %> - Invidious</title>
+<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/channel/<%= channel.ucid %>" />
+<% end %>
+
+<% if channel.banner %>
+ <div class="h-box">
+ <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).full_path %>">
+ </div>
+
+ <div class="h-box">
+ <hr>
+ </div>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
- <h3><%= author %></h3>
+ <div class="channel-profile">
+ <img src="/ggpht<%= URI.parse(channel.author_thumbnail).full_path %>">
+ <span><%= channel.author %></span>
+ </div>
</div>
<div class="pure-u-1-3" style="text-align:right">
<h3>
- <a href="/feed/channel/<%= ucid %>"><i class="icon ion-logo-rss"></i></a>
+ <a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="h-box">
- <% sub_count_text = number_to_short_text(sub_count) %>
+ <% ucid = channel.ucid %>
+ <% author = channel.author %>
+ <% sub_count_text = number_to_short_text(channel.sub_count) %>
<%= rendered "components/subscribe_widget" %>
</div>
<div class="pure-g h-box">
<div class="pure-u-1-3">
- <a href="https://www.youtube.com/channel/<%= ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
- <% if !auto_generated %>
+ <a href="https://www.youtube.com/channel/<%= channel.ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
+ <% if !channel.auto_generated %>
<div class="pure-u-1 pure-md-1-3">
<b><%= translate(locale, "Videos") %></b>
</div>
<% end %>
<div class="pure-u-1 pure-md-1-3">
- <% if auto_generated %>
+ <% if channel.auto_generated %>
<b><%= translate(locale, "Playlists") %></b>
<% else %>
- <a href="/channel/<%= ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
+ <a href="/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
<% end %>
</div>
</div>
@@ -43,7 +58,7 @@
<% if sort_by == sort %>
<b><%= translate(locale, sort) %></b>
<% else %>
- <a href="/channel/<%= ucid %>?page=<%= page %>&sort_by=<%= sort %>">
+ <a href="/channel/<%= channel.ucid %>?page=<%= page %>&sort_by=<%= sort %>">
<%= translate(locale, sort) %>
</a>
<% end %>
@@ -68,7 +83,7 @@
<div class="pure-g h-box">
<div class="pure-u-1 pure-u-lg-1-5">
<% if page > 1 %>
- <a href="/channel/<%= ucid %>?page=<%= page - 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
+ <a href="/channel/<%= channel.ucid %>?page=<%= page - 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
<%= translate(locale, "Previous page") %>
</a>
<% end %>
@@ -76,7 +91,7 @@
<div class="pure-u-1 pure-u-lg-3-5"></div>
<div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
<% if count == 60 %>
- <a href="/channel/<%= ucid %>?page=<%= page + 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
+ <a href="/channel/<%= channel.ucid %>?page=<%= page + 1 %><% if sort_by != "newest" %>&sort_by=<%= sort_by %><% end %>">
<%= translate(locale, "Next page") %>
</a>
<% end %>
diff --git a/src/invidious/views/playlists.ecr b/src/invidious/views/playlists.ecr
index fd7bf4d8..8d1236aa 100644
--- a/src/invidious/views/playlists.ecr
+++ b/src/invidious/views/playlists.ecr
@@ -1,33 +1,48 @@
<% content_for "header" do %>
-<title><%= author %> - Invidious</title>
+<title><%= channel.author %> - Invidious</title>
+<% end %>
+
+<% if channel.banner %>
+ <div class="h-box">
+ <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).full_path %>">
+ </div>
+
+ <div class="h-box">
+ <hr>
+ </div>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
- <h3><%= author %></h3>
+ <div class="channel-profile">
+ <img src="/ggpht<%= URI.parse(channel.author_thumbnail).full_path %>">
+ <span><%= channel.author %></span>
+ </div>
</div>
<div class="pure-u-1-3" style="text-align:right">
<h3>
- <a href="/feed/channel/<%= ucid %>"><i class="icon ion-logo-rss"></i></a>
+ <a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="h-box">
- <% sub_count_text = number_to_short_text(sub_count) %>
+ <% ucid = channel.ucid %>
+ <% author = channel.author %>
+ <% sub_count_text = number_to_short_text(channel.sub_count) %>
<%= rendered "components/subscribe_widget" %>
</div>
<div class="pure-g h-box">
<div class="pure-g pure-u-1-3">
<div class="pure-u-1 pure-md-1-3">
- <a href="https://www.youtube.com/channel/<%= ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
+ <a href="https://www.youtube.com/channel/<%= channel.ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
</div>
<div class="pure-u-1 pure-md-1-3">
- <a href="/channel/<%= ucid %>"><%= translate(locale, "Videos") %></a>
+ <a href="/channel/<%= channel.ucid %>"><%= translate(locale, "Videos") %></a>
</div>
<div class="pure-u-1 pure-md-1-3">
- <% if !auto_generated %>
+ <% if !channel.auto_generated %>
<b><%= translate(locale, "Playlists") %></b>
<% end %>
</div>
@@ -40,7 +55,7 @@
<% if sort_by == sort %>
<b><%= translate(locale, sort) %></b>
<% else %>
- <a href="/channel/<%= ucid %>/playlists?sort_by=<%= sort %>">
+ <a href="/channel/<%= channel.ucid %>/playlists?sort_by=<%= sort %>">
<%= translate(locale, sort) %>
</a>
<% end %>
@@ -66,7 +81,7 @@
<div class="pure-u-1 pure-u-md-4-5"></div>
<div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
<% if items.size >= 28 %>
- <a href="/channel/<%= ucid %>/playlists?continuation=<%= continuation %><% if sort_by != "last" %>&sort_by=<%= sort_by %><% end %>">
+ <a href="/channel/<%= channel.ucid %>/playlists?continuation=<%= continuation %><% if sort_by != "last" %>&sort_by=<%= sort_by %><% end %>">
<%= translate(locale, "Next page") %>
</a>
<% end %>
diff --git a/src/invidious/views/watch.ecr b/src/invidious/views/watch.ecr
index 36fabcc3..1d4557fa 100644
--- a/src/invidious/views/watch.ecr
+++ b/src/invidious/views/watch.ecr
@@ -83,9 +83,9 @@ var video_data = {
<div class="pure-g">
<div class="pure-u-1 pure-u-lg-1-5">
<div class="h-box">
- <p>
+ <span>
<a href="https://www.youtube.com/watch?v=<%= video.id %>"><%= translate(locale, "Watch on YouTube") %></a>
- </p>
+ </span>
<p>
<% if params.annotations %>
<a href="/watch?<%= env.params.query %>&iv_load_policy=3">
@@ -165,11 +165,12 @@ var video_data = {
<div class="pure-u-1 <% if params.related_videos || plid %>pure-u-lg-3-5<% else %>pure-u-md-4-5<% end %>">
<div class="h-box">
- <p>
- <a href="/channel/<%= video.ucid %>">
- <h3><%= video.author %></h3>
- </a>
- </p>
+ <a href="/channel/<%= video.ucid %>" style="display:block;width:fit-content;width:-moz-fit-content">
+ <div class="channel-profile">
+ <img src="/ggpht<%= URI.parse(video.author_thumbnail).full_path %>">
+ <span><%= video.author %></span>
+ </div>
+ </a>
<% ucid = video.ucid %>
<% author = video.author %>