Diffstat (limited to 'src')
-rw-r--r--  src/ext/kemal_content_for.cr | 16
-rw-r--r--  src/ext/kemal_static_file_handler.cr (renamed from src/invidious/helpers/static_file_handler.cr) | 33
-rw-r--r--  src/invidious.cr | 3859
-rw-r--r--  src/invidious/channels.cr | 984
-rw-r--r--  src/invidious/channels/about.cr | 206
-rw-r--r--  src/invidious/channels/channels.cr | 304
-rw-r--r--  src/invidious/channels/community.cr | 332
-rw-r--r--  src/invidious/channels/playlists.cr | 46
-rw-r--r--  src/invidious/channels/videos.cr | 192
-rw-r--r--  src/invidious/comments.cr | 662
-rw-r--r--  src/invidious/comments/content.cr | 89
-rw-r--r--  src/invidious/comments/links_util.cr | 76
-rw-r--r--  src/invidious/comments/reddit.cr | 41
-rw-r--r--  src/invidious/comments/reddit_types.cr | 57
-rw-r--r--  src/invidious/comments/youtube.cr | 365
-rw-r--r--  src/invidious/config.cr | 256
-rw-r--r--  src/invidious/database/annotations.cr | 24
-rw-r--r--  src/invidious/database/base.cr | 136
-rw-r--r--  src/invidious/database/channels.cr | 158
-rw-r--r--  src/invidious/database/migration.cr | 38
-rw-r--r--  src/invidious/database/migrations/0001_create_channels_table.cr | 30
-rw-r--r--  src/invidious/database/migrations/0002_create_videos_table.cr | 28
-rw-r--r--  src/invidious/database/migrations/0003_create_channel_videos_table.cr | 35
-rw-r--r--  src/invidious/database/migrations/0004_create_users_table.cr | 34
-rw-r--r--  src/invidious/database/migrations/0005_create_session_ids_table.cr | 28
-rw-r--r--  src/invidious/database/migrations/0006_create_nonces_table.cr | 27
-rw-r--r--  src/invidious/database/migrations/0007_create_annotations_table.cr | 20
-rw-r--r--  src/invidious/database/migrations/0008_create_playlists_table.cr | 50
-rw-r--r--  src/invidious/database/migrations/0009_create_playlist_videos_table.cr | 27
-rw-r--r--  src/invidious/database/migrations/0010_make_videos_unlogged.cr | 11
-rw-r--r--  src/invidious/database/migrator.cr | 49
-rw-r--r--  src/invidious/database/nonces.cr | 55
-rw-r--r--  src/invidious/database/playlists.cr | 262
-rw-r--r--  src/invidious/database/sessions.cr | 74
-rw-r--r--  src/invidious/database/statistics.cr | 49
-rw-r--r--  src/invidious/database/users.cr | 228
-rw-r--r--  src/invidious/database/videos.cr | 52
-rw-r--r--  src/invidious/exceptions.cr | 40
-rw-r--r--  src/invidious/frontend/channel_page.cr | 46
-rw-r--r--  src/invidious/frontend/comments_reddit.cr | 50
-rw-r--r--  src/invidious/frontend/comments_youtube.cr | 208
-rw-r--r--  src/invidious/frontend/misc.cr | 14
-rw-r--r--  src/invidious/frontend/pagination.cr | 97
-rw-r--r--  src/invidious/frontend/search_filters.cr | 135
-rw-r--r--  src/invidious/frontend/watch_page.cr | 107
-rw-r--r--  src/invidious/hashtag.cr | 42
-rw-r--r--  src/invidious/helpers/crystal_class_overrides.cr | 104
-rw-r--r--  src/invidious/helpers/errors.cr | 165
-rw-r--r--  src/invidious/helpers/handlers.cr | 69
-rw-r--r--  src/invidious/helpers/helpers.cr | 625
-rw-r--r--  src/invidious/helpers/i18n.cr | 218
-rw-r--r--  src/invidious/helpers/i18next.cr | 566
-rw-r--r--  src/invidious/helpers/json_filter.cr | 248
-rw-r--r--  src/invidious/helpers/logger.cr | 43
-rw-r--r--  src/invidious/helpers/macros.cr | 21
-rw-r--r--  src/invidious/helpers/proxy.cr | 316
-rw-r--r--  src/invidious/helpers/serialized_yt_data.cr | 317
-rw-r--r--  src/invidious/helpers/sig_helper.cr | 349
-rw-r--r--  src/invidious/helpers/signatures.cr | 100
-rw-r--r--  src/invidious/helpers/tokens.cr | 25
-rw-r--r--  src/invidious/helpers/utils.cr | 278
-rw-r--r--  src/invidious/helpers/webvtt.cr | 81
-rw-r--r--  src/invidious/helpers/youtube_api.cr | 31
-rw-r--r--  src/invidious/http_server/utils.cr | 41
-rw-r--r--  src/invidious/jobs.cr | 27
-rw-r--r--  src/invidious/jobs/base_job.cr | 30
-rw-r--r--  src/invidious/jobs/bypass_captcha_job.cr | 131
-rw-r--r--  src/invidious/jobs/clear_expired_items_job.cr | 27
-rw-r--r--  src/invidious/jobs/instance_refresh_job.cr | 97
-rw-r--r--  src/invidious/jobs/notification_job.cr | 4
-rw-r--r--  src/invidious/jobs/pull_popular_videos_job.cr | 13
-rw-r--r--  src/invidious/jobs/refresh_channels_job.cr | 16
-rw-r--r--  src/invidious/jobs/refresh_feeds_job.cr | 4
-rw-r--r--  src/invidious/jobs/statistics_refresh_job.cr | 22
-rw-r--r--  src/invidious/jobs/subscribe_to_feeds_job.cr | 2
-rw-r--r--  src/invidious/jobs/update_decrypt_function_job.cr | 14
-rw-r--r--  src/invidious/jsonify/api_v1/common.cr | 18
-rw-r--r--  src/invidious/jsonify/api_v1/video_json.cr | 295
-rw-r--r--  src/invidious/mixes.cr | 4
-rw-r--r--  src/invidious/playlists.cr | 213
-rw-r--r--  src/invidious/routes/account.cr | 354
-rw-r--r--  src/invidious/routes/api/manifest.cr | 241
-rw-r--r--  src/invidious/routes/api/v1/authenticated.cr | 490
-rw-r--r--  src/invidious/routes/api/v1/channels.cr | 516
-rw-r--r--  src/invidious/routes/api/v1/feeds.cr | 45
-rw-r--r--  src/invidious/routes/api/v1/misc.cr | 203
-rw-r--r--  src/invidious/routes/api/v1/search.cr | 87
-rw-r--r--  src/invidious/routes/api/v1/videos.cr | 432
-rw-r--r--  src/invidious/routes/base_route.cr | 2
-rw-r--r--  src/invidious/routes/before_all.cr | 126
-rw-r--r--  src/invidious/routes/channels.cr | 423
-rw-r--r--  src/invidious/routes/embed.cr | 51
-rw-r--r--  src/invidious/routes/errors.cr | 52
-rw-r--r--  src/invidious/routes/feeds.cr | 462
-rw-r--r--  src/invidious/routes/images.cr | 153
-rw-r--r--  src/invidious/routes/login.cr | 368
-rw-r--r--  src/invidious/routes/misc.cr | 33
-rw-r--r--  src/invidious/routes/notifications.cr | 34
-rw-r--r--  src/invidious/routes/playlists.cr | 237
-rw-r--r--  src/invidious/routes/preferences.cr | 241
-rw-r--r--  src/invidious/routes/search.cr | 112
-rw-r--r--  src/invidious/routes/subscriptions.cr | 130
-rw-r--r--  src/invidious/routes/video_playback.cr | 303
-rw-r--r--  src/invidious/routes/watch.cr | 212
-rw-r--r--  src/invidious/routing.cr | 317
-rw-r--r--  src/invidious/search.cr | 473
-rw-r--r--  src/invidious/search/ctoken.cr | 32
-rw-r--r--  src/invidious/search/filters.cr | 376
-rw-r--r--  src/invidious/search/processors.cr | 56
-rw-r--r--  src/invidious/search/query.cr | 168
-rw-r--r--  src/invidious/trending.cr | 59
-rw-r--r--  src/invidious/user/captcha.cr | 78
-rw-r--r--  src/invidious/user/converters.cr | 12
-rw-r--r--  src/invidious/user/cookies.cr | 39
-rw-r--r--  src/invidious/user/exports.cr | 35
-rw-r--r--  src/invidious/user/imports.cr | 337
-rw-r--r--  src/invidious/user/preferences.cr | 275
-rw-r--r--  src/invidious/user/user.cr | 27
-rw-r--r--  src/invidious/users.cr | 502
-rw-r--r--  src/invidious/videos.cr | 1212
-rw-r--r--  src/invidious/videos/caption.cr | 224
-rw-r--r--  src/invidious/videos/clip.cr | 22
-rw-r--r--  src/invidious/videos/description.cr | 82
-rw-r--r--  src/invidious/videos/formats.cr | 116
-rw-r--r--  src/invidious/videos/music.cr | 13
-rw-r--r--  src/invidious/videos/parser.cr | 489
-rw-r--r--  src/invidious/videos/regions.cr | 27
-rw-r--r--  src/invidious/videos/storyboard.cr | 122
-rw-r--r--  src/invidious/videos/transcript.cr | 126
-rw-r--r--  src/invidious/videos/video_preferences.cr | 162
-rw-r--r--  src/invidious/views/add_playlist_items.ecr | 31
-rw-r--r--  src/invidious/views/channel.ecr | 144
-rw-r--r--  src/invidious/views/community.ecr | 74
-rw-r--r--  src/invidious/views/components/channel_info.ecr | 61
-rw-r--r--  src/invidious/views/components/item.ecr | 332
-rw-r--r--  src/invidious/views/components/items_paginated.ecr | 11
-rw-r--r--  src/invidious/views/components/player.ecr | 55
-rw-r--r--  src/invidious/views/components/player_sources.ecr | 33
-rw-r--r--  src/invidious/views/components/search_box.ecr | 12
-rw-r--r--  src/invidious/views/components/subscribe_widget.ecr | 10
-rw-r--r--  src/invidious/views/components/video-context-buttons.ecr | 21
-rw-r--r--  src/invidious/views/create_playlist.ecr | 2
-rw-r--r--  src/invidious/views/delete_playlist.ecr | 2
-rw-r--r--  src/invidious/views/edit_playlist.ecr | 101
-rw-r--r--  src/invidious/views/embed.ecr | 8
-rw-r--r--  src/invidious/views/error.ecr | 1
-rw-r--r--  src/invidious/views/feeds/history.ecr | 59
-rw-r--r--  src/invidious/views/feeds/playlists.ecr | 43
-rw-r--r--  src/invidious/views/feeds/popular.ecr (renamed from src/invidious/views/popular.ecr) | 10
-rw-r--r--  src/invidious/views/feeds/subscriptions.ecr | 74
-rw-r--r--  src/invidious/views/feeds/trending.ecr (renamed from src/invidious/views/trending.ecr) | 10
-rw-r--r--  src/invidious/views/hashtag.ecr | 8
-rw-r--r--  src/invidious/views/history.ecr | 75
-rw-r--r--  src/invidious/views/licenses.ecr | 64
-rw-r--r--  src/invidious/views/mix.ecr | 16
-rw-r--r--  src/invidious/views/playlist.ecr | 131
-rw-r--r--  src/invidious/views/playlists.ecr | 98
-rw-r--r--  src/invidious/views/post.ecr | 48
-rw-r--r--  src/invidious/views/privacy.ecr | 3
-rw-r--r--  src/invidious/views/search.ecr | 141
-rw-r--r--  src/invidious/views/search_homepage.ecr | 8
-rw-r--r--  src/invidious/views/subscriptions.ecr | 81
-rw-r--r--  src/invidious/views/template.ecr | 101
-rw-r--r--  src/invidious/views/user/authorize_token.ecr (renamed from src/invidious/views/authorize_token.ecr) | 10
-rw-r--r--  src/invidious/views/user/change_password.ecr (renamed from src/invidious/views/change_password.ecr) | 2
-rw-r--r--  src/invidious/views/user/clear_watch_history.ecr (renamed from src/invidious/views/clear_watch_history.ecr) | 2
-rw-r--r--  src/invidious/views/user/data_control.ecr (renamed from src/invidious/views/data_control.ecr) | 14
-rw-r--r--  src/invidious/views/user/delete_account.ecr (renamed from src/invidious/views/delete_account.ecr) | 2
-rw-r--r--  src/invidious/views/user/login.ecr (renamed from src/invidious/views/login.ecr) | 57
-rw-r--r--  src/invidious/views/user/preferences.ecr (renamed from src/invidious/views/preferences.ecr) | 128
-rw-r--r--  src/invidious/views/user/subscription_manager.ecr (renamed from src/invidious/views/subscription_manager.ecr) | 18
-rw-r--r--  src/invidious/views/user/token_manager.ecr (renamed from src/invidious/views/token_manager.ecr) | 8
-rw-r--r--  src/invidious/views/view_all_playlists.ecr | 38
-rw-r--r--  src/invidious/views/watch.ecr | 260
-rw-r--r--  src/invidious/yt_backend/connection_pool.cr | 116
-rw-r--r--  src/invidious/yt_backend/extractors.cr | 1042
-rw-r--r--  src/invidious/yt_backend/extractors_utils.cr | 87
-rw-r--r--  src/invidious/yt_backend/url_sanitizer.cr | 121
-rw-r--r--  src/invidious/yt_backend/youtube_api.cr | 699
179 files changed, 18203 insertions, 11143 deletions
diff --git a/src/ext/kemal_content_for.cr b/src/ext/kemal_content_for.cr
new file mode 100644
index 00000000..a4f3fd96
--- /dev/null
+++ b/src/ext/kemal_content_for.cr
@@ -0,0 +1,16 @@
+# Overrides for Kemal's `content_for` macro in order to keep using
+# kilt as it was before Kemal v1.1.1 (Kemal PR #618).
+
+require "kemal"
+require "kilt"
+
+macro content_for(key, file = __FILE__)
+ %proc = ->() {
+ __kilt_io__ = IO::Memory.new
+ {{ yield }}
+ __kilt_io__.to_s
+ }
+
+ CONTENT_FOR_BLOCKS[{{key}}] = Tuple.new {{file}}, %proc
+ nil
+end
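(For reference, a minimal sketch of how this macro pair is normally used with Kemal: `content_for` collects a block inside a view and `yield_content` emits it in the layout. The key and file names below are illustrative only, not taken from this diff.)

    <!-- in a view template, e.g. some_view.ecr -->
    <% content_for "header" do %>
      <script src="/js/example.js"></script>
    <% end %>

    <!-- in the layout template -->
    <%= yield_content "header" %>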
diff --git a/src/invidious/helpers/static_file_handler.cr b/src/ext/kemal_static_file_handler.cr
index be9d36ab..eb068aeb 100644
--- a/src/invidious/helpers/static_file_handler.cr
+++ b/src/ext/kemal_static_file_handler.cr
@@ -111,7 +111,7 @@ module Kemal
if @fallthrough
call_next(context)
else
- context.response.status_code = 405
+ context.response.status = HTTP::Status::METHOD_NOT_ALLOWED
context.response.headers.add("Allow", "GET, HEAD")
end
return
@@ -124,7 +124,7 @@ module Kemal
# File path cannot contain '\0' (NUL) because no filesystem I know of
# accepts the '\0' character in a file name.
if request_path.includes? '\0'
- context.response.status_code = 400
+ context.response.status = HTTP::Status::BAD_REQUEST
return
end
@@ -143,13 +143,15 @@ module Kemal
add_cache_headers(context.response.headers, last_modified)
if cache_request?(context, last_modified)
- context.response.status_code = 304
+ context.response.status = HTTP::Status::NOT_MODIFIED
return
end
send_file(context, file_path, file[:data], file[:filestat])
else
- is_dir = Dir.exists? file_path
+ file_info = File.info?(file_path)
+ is_dir = file_info.try &.directory? || false
+ is_file = file_info.try &.file? || false
if request_path != expanded_path
redirect_to context, expanded_path
@@ -157,35 +159,34 @@ module Kemal
redirect_to context, expanded_path + '/'
end
- if Dir.exists?(file_path)
+ return call_next(context) if file_info.nil?
+
+ if is_dir
if config.is_a?(Hash) && config["dir_listing"] == true
context.response.content_type = "text/html"
directory_listing(context.response, request_path, file_path)
else
call_next(context)
end
- elsif File.exists?(file_path)
- last_modified = modification_time(file_path)
+ elsif is_file
+ last_modified = file_info.modification_time
add_cache_headers(context.response.headers, last_modified)
if cache_request?(context, last_modified)
- context.response.status_code = 304
+ context.response.status = HTTP::Status::NOT_MODIFIED
return
end
- if @cached_files.sum { |element| element[1][:data].bytesize } + (size = File.size(file_path)) < CACHE_LIMIT
+ if @cached_files.sum(&.[1][:data].bytesize) + (size = File.size(file_path)) < CACHE_LIMIT
data = Bytes.new(size)
- File.open(file_path) do |file|
- file.read(data)
- end
- filestat = File.info(file_path)
+ File.open(file_path, &.read(data))
- @cached_files[file_path] = {data: data, filestat: filestat}
- send_file(context, file_path, data, filestat)
+ @cached_files[file_path] = {data: data, filestat: file_info}
+ send_file(context, file_path, data, file_info)
else
send_file(context, file_path)
end
- else
+ else # Not a normal file (FIFO/device/socket)
call_next(context)
end
end
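(The rewrite above replaces the separate `Dir.exists?` / `File.exists?` checks with a single `File.info?` stat, which also exposes the modification time and lets non-regular files fall through to the next handler. A minimal standalone sketch of that pattern, with a hypothetical path:)

    # One stat call; returns nil when the path does not exist.
    if info = File.info?("/tmp/example")
      if info.directory?
        puts "directory"
      elsif info.file?
        puts "regular file, modified at #{info.modification_time}"
      else
        puts "special file (FIFO/device/socket) - fall through"
      end
    else
      puts "path does not exist - fall through"
    end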
diff --git a/src/invidious.cr b/src/invidious.cr
index 65b1091b..0be73555 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -16,31 +16,57 @@
require "digest/md5"
require "file_utils"
+
+# Require kemal, kilt, then our own overrides
require "kemal"
+require "kilt"
+require "./ext/kemal_content_for.cr"
+require "./ext/kemal_static_file_handler.cr"
+
+require "http_proxy"
+require "athena-negotiation"
require "openssl/hmac"
require "option_parser"
-require "pg"
require "sqlite3"
require "xml"
require "yaml"
require "compress/zip"
require "protodec/utils"
+
+require "./invidious/database/*"
+require "./invidious/database/migrations/*"
+require "./invidious/http_server/*"
require "./invidious/helpers/*"
+require "./invidious/yt_backend/*"
+require "./invidious/frontend/*"
+require "./invidious/videos/*"
+
+require "./invidious/jsonify/**"
+
require "./invidious/*"
+require "./invidious/comments/*"
+require "./invidious/channels/*"
+require "./invidious/user/*"
+require "./invidious/search/*"
require "./invidious/routes/**"
require "./invidious/jobs/**"
+# Declare the base namespace for invidious
+module Invidious
+end
+
+# Simple alias to make code easier to read
+alias IV = Invidious
+
CONFIG = Config.load
-HMAC_KEY = CONFIG.hmac_key || Random::Secure.hex(32)
+HMAC_KEY = CONFIG.hmac_key
-PG_DB = DB.open CONFIG.database_url
-ARCHIVE_URL = URI.parse("https://archive.org")
-LOGIN_URL = URI.parse("https://accounts.google.com")
-PUBSUB_URL = URI.parse("https://pubsubhubbub.appspot.com")
-REDDIT_URL = URI.parse("https://www.reddit.com")
-TEXTCAPTCHA_URL = URI.parse("https://textcaptcha.com")
-YT_URL = URI.parse("https://www.youtube.com")
-HOST_URL = make_host_url(Kemal.config)
+PG_DB = DB.open CONFIG.database_url
+ARCHIVE_URL = URI.parse("https://archive.org")
+PUBSUB_URL = URI.parse("https://pubsubhubbub.appspot.com")
+REDDIT_URL = URI.parse("https://www.reddit.com")
+YT_URL = URI.parse("https://www.youtube.com")
+HOST_URL = make_host_url(Kemal.config)
CHARS_SAFE = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_"
TEST_IDS = {"AgbeGFYluEA", "BaW_jenozKc", "a9LDPn-MO4I", "ddFvjfvPnqk", "iqKdEhx-dD4"}
@@ -65,7 +91,11 @@ SOFTWARE = {
"branch" => "#{CURRENT_BRANCH}",
}
-YT_POOL = YoutubeConnectionPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0, use_quic: CONFIG.use_quic)
+YT_POOL = YoutubeConnectionPool.new(YT_URL, capacity: CONFIG.pool_size)
+
+# Image request pool
+
+GGPHT_POOL = YoutubeConnectionPool.new(URI.parse("https://yt3.ggpht.com"), capacity: CONFIG.pool_size)
# CLI
Kemal.config.extra_options do |parser|
@@ -92,10 +122,17 @@ Kemal.config.extra_options do |parser|
parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
CONFIG.log_level = LogLevel.parse(log_level)
end
+ parser.on("-k", "--colorize", "Colorize logs") do
+ CONFIG.colorize_logs = true
+ end
parser.on("-v", "--version", "Print version") do
puts SOFTWARE.to_pretty_json
exit
end
+ parser.on("--migrate", "Run any migrations (beta, use at your own risk!!") do
+ Invidious::Database::Migrator.new(PG_DB).migrate
+ exit
+ end
end
Kemal::CLI.new ARGV
@@ -104,25 +141,34 @@ if CONFIG.output.upcase != "STDOUT"
FileUtils.mkdir_p(File.dirname(CONFIG.output))
end
OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
-LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
+LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level, CONFIG.colorize_logs)
# Check table integrity
-if CONFIG.check_tables
- check_enum(PG_DB, "privacy", PlaylistPrivacy)
+Invidious::Database.check_integrity(CONFIG)
+
+{% if !flag?(:skip_videojs_download) %}
+ # Resolve player dependencies. This is done at compile time.
+ #
+ # Running the script by itself would show some colorful feedback while this doesn't.
+ # Perhaps we should just move the script to runtime in order to get that feedback?
- check_table(PG_DB, "channels", InvidiousChannel)
- check_table(PG_DB, "channel_videos", ChannelVideo)
- check_table(PG_DB, "playlists", InvidiousPlaylist)
- check_table(PG_DB, "playlist_videos", PlaylistVideo)
- check_table(PG_DB, "nonces", Nonce)
- check_table(PG_DB, "session_ids", SessionId)
- check_table(PG_DB, "users", User)
- check_table(PG_DB, "videos", Video)
+ {% puts "\nChecking player dependencies, this may take more than 20 minutes... If it is stuck, check your internet connection.\n" %}
+ {% if flag?(:minified_player_dependencies) %}
+ {% puts run("../scripts/fetch-player-dependencies.cr", "--minified").stringify %}
+ {% else %}
+ {% puts run("../scripts/fetch-player-dependencies.cr").stringify %}
+ {% end %}
+ {% puts "\nDone checking player dependencies, now compiling Invidious...\n" %}
+{% end %}
- if CONFIG.cache_annotations
- check_table(PG_DB, "annotations", Annotation)
+# Misc
+
+DECRYPT_FUNCTION =
+ if sig_helper_address = CONFIG.signature_server.presence
+ IV::DecryptFunction.new(sig_helper_address)
+ else
+ nil
end
-end
# Start jobs
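(The `{% run(...) %}` block above relies on Crystal's compile-time `run` macro: the named script is compiled and executed during the build, and its output is spliced into the program. A hedged, self-contained sketch of that mechanism, using a hypothetical helper script rather than the real fetch-player-dependencies.cr:)

    # ./build_info.cr (hypothetical helper): prints something at build time,
    # e.g.  puts "built #{Time.utc}"
    #
    # In the consuming file, the helper's stdout is captured at compile time
    # and embedded into the binary as a string literal:
    BUILD_INFO = {{ run("./build_info.cr").stringify }}

    puts BUILD_INFO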
@@ -134,11 +180,6 @@ if CONFIG.feed_threads > 0
Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB)
end
-DECRYPT_FUNCTION = DecryptFunction.new(CONFIG.decrypt_polling)
-if CONFIG.decrypt_polling
- Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
-end
-
if CONFIG.statistics_enabled
Invidious::Jobs.register Invidious::Jobs::StatisticsRefreshJob.new(PG_DB, SOFTWARE)
end
@@ -151,12 +192,12 @@ if CONFIG.popular_enabled
Invidious::Jobs.register Invidious::Jobs::PullPopularVideosJob.new(PG_DB)
end
-if CONFIG.captcha_key
- Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new
-end
+CONNECTION_CHANNEL = ::Channel({Bool, ::Channel(PQ::Notification)}).new(32)
+Invidious::Jobs.register Invidious::Jobs::NotificationJob.new(CONNECTION_CHANNEL, CONFIG.database_url)
+
+Invidious::Jobs.register Invidious::Jobs::ClearExpiredItemsJob.new
-connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
-Invidious::Jobs.register Invidious::Jobs::NotificationJob.new(connection_channel, CONFIG.database_url)
+Invidious::Jobs.register Invidious::Jobs::InstanceListRefreshJob.new
Invidious::Jobs.start_all
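(CONNECTION_CHANNEL above is declared as a channel of {Bool, Channel(PQ::Notification)} tuples, which suggests a register/unregister control channel: the Bool marks connect/disconnect, and the inner channel is where the NotificationJob fans notifications out. A simplified, self-contained sketch of that kind of pattern, with String payloads standing in for PQ::Notification and hypothetical names:)

    control = Channel({Bool, Channel(String)}).new(32)

    # Dispatcher fiber: tracks the current listeners and fans payloads out.
    spawn do
      listeners = [] of Channel(String)
      loop do
        connect, ch = control.receive
        connect ? (listeners << ch) : listeners.delete(ch)
        listeners.each &.send("notification payload")
      end
    end

    subscriber = Channel(String).new
    control.send({true, subscriber})  # register this connection
    puts subscriber.receive           # one fanned-out payload
    control.send({false, subscriber}) # unregister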
@@ -164,3747 +205,28 @@ def popular_videos
Invidious::Jobs::PullPopularVideosJob::POPULAR_VIDEOS.get
end
-before_all do |env|
- preferences = begin
- Preferences.from_json(env.request.cookies["PREFS"]?.try &.value || "{}")
- rescue
- Preferences.from_json("{}")
- end
-
- env.set "preferences", preferences
- env.response.headers["X-XSS-Protection"] = "1; mode=block"
- env.response.headers["X-Content-Type-Options"] = "nosniff"
- extra_media_csp = ""
- if CONFIG.disabled?("local") || !preferences.local
- extra_media_csp += " https://*.googlevideo.com:443"
- extra_media_csp += " https://*.youtube.com:443"
- end
- # TODO: Remove style-src's 'unsafe-inline', requires to remove all inline styles (<style> [..] </style>, style=" [..] ")
- env.response.headers["Content-Security-Policy"] = "default-src 'none'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:; connect-src 'self'; manifest-src 'self'; media-src 'self' blob:#{extra_media_csp}; child-src blob:"
- env.response.headers["Referrer-Policy"] = "same-origin"
-
- if (Kemal.config.ssl || CONFIG.https_only) && CONFIG.hsts
- env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
- end
-
- next if {
- "/sb/",
- "/vi/",
- "/s_p/",
- "/yts/",
- "/ggpht/",
- "/api/manifest/",
- "/videoplayback",
- "/latest_version",
- }.any? { |r| env.request.resource.starts_with? r }
-
- if env.request.cookies.has_key? "SID"
- sid = env.request.cookies["SID"].value
-
- if sid.starts_with? "v1:"
- raise "Cannot use token as SID"
- end
-
- # Invidious users only have SID
- if !env.request.cookies.has_key? "SSID"
- if email = PG_DB.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
- user = PG_DB.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
- csrf_token = generate_response(sid, {
- ":authorize_token",
- ":playlist_ajax",
- ":signout",
- ":subscription_ajax",
- ":token_ajax",
- ":watch_ajax",
- }, HMAC_KEY, PG_DB, 1.week)
-
- preferences = user.preferences
- env.set "preferences", preferences
-
- env.set "sid", sid
- env.set "csrf_token", csrf_token
- env.set "user", user
- end
- else
- headers = HTTP::Headers.new
- headers["Cookie"] = env.request.headers["Cookie"]
-
- begin
- user, sid = get_user(sid, headers, PG_DB, false)
- csrf_token = generate_response(sid, {
- ":authorize_token",
- ":playlist_ajax",
- ":signout",
- ":subscription_ajax",
- ":token_ajax",
- ":watch_ajax",
- }, HMAC_KEY, PG_DB, 1.week)
-
- preferences = user.preferences
- env.set "preferences", preferences
-
- env.set "sid", sid
- env.set "csrf_token", csrf_token
- env.set "user", user
- rescue ex
- end
- end
- end
-
- dark_mode = convert_theme(env.params.query["dark_mode"]?) || preferences.dark_mode.to_s
- thin_mode = env.params.query["thin_mode"]? || preferences.thin_mode.to_s
- thin_mode = thin_mode == "true"
- locale = env.params.query["hl"]? || preferences.locale
-
- preferences.dark_mode = dark_mode
- preferences.thin_mode = thin_mode
- preferences.locale = locale
- env.set "preferences", preferences
-
- current_page = env.request.path
- if env.request.query
- query = HTTP::Params.parse(env.request.query.not_nil!)
-
- if query["referer"]?
- query["referer"] = get_referer(env, "/")
- end
-
- current_page += "?#{query}"
- end
-
- env.set "current_page", URI.encode_www_form(current_page)
-end
-
-Invidious::Routing.get "/", Invidious::Routes::Misc, :home
-Invidious::Routing.get "/privacy", Invidious::Routes::Misc, :privacy
-Invidious::Routing.get "/licenses", Invidious::Routes::Misc, :licenses
-
-Invidious::Routing.get "/watch", Invidious::Routes::Watch, :handle
-Invidious::Routing.get "/watch/:id", Invidious::Routes::Watch, :redirect
-Invidious::Routing.get "/shorts/:id", Invidious::Routes::Watch, :redirect
-Invidious::Routing.get "/w/:id", Invidious::Routes::Watch, :redirect
-Invidious::Routing.get "/v/:id", Invidious::Routes::Watch, :redirect
-Invidious::Routing.get "/e/:id", Invidious::Routes::Watch, :redirect
-
-Invidious::Routing.get "/embed/", Invidious::Routes::Embed, :redirect
-Invidious::Routing.get "/embed/:id", Invidious::Routes::Embed, :show
-
-Invidious::Routing.get "/view_all_playlists", Invidious::Routes::Playlists, :index
-Invidious::Routing.get "/create_playlist", Invidious::Routes::Playlists, :new
-Invidious::Routing.post "/create_playlist", Invidious::Routes::Playlists, :create
-Invidious::Routing.get "/subscribe_playlist", Invidious::Routes::Playlists, :subscribe
-Invidious::Routing.get "/delete_playlist", Invidious::Routes::Playlists, :delete_page
-Invidious::Routing.post "/delete_playlist", Invidious::Routes::Playlists, :delete
-Invidious::Routing.get "/edit_playlist", Invidious::Routes::Playlists, :edit
-Invidious::Routing.post "/edit_playlist", Invidious::Routes::Playlists, :update
-Invidious::Routing.get "/add_playlist_items", Invidious::Routes::Playlists, :add_playlist_items_page
-Invidious::Routing.post "/playlist_ajax", Invidious::Routes::Playlists, :playlist_ajax
-Invidious::Routing.get "/playlist", Invidious::Routes::Playlists, :show
-Invidious::Routing.get "/mix", Invidious::Routes::Playlists, :mix
-
-Invidious::Routing.get "/opensearch.xml", Invidious::Routes::Search, :opensearch
-Invidious::Routing.get "/results", Invidious::Routes::Search, :results
-Invidious::Routing.get "/search", Invidious::Routes::Search, :search
-
-Invidious::Routing.get "/login", Invidious::Routes::Login, :login_page
-Invidious::Routing.post "/login", Invidious::Routes::Login, :login
-Invidious::Routing.post "/signout", Invidious::Routes::Login, :signout
-
-Invidious::Routing.get "/preferences", Invidious::Routes::PreferencesRoute, :show
-Invidious::Routing.post "/preferences", Invidious::Routes::PreferencesRoute, :update
-Invidious::Routing.get "/toggle_theme", Invidious::Routes::PreferencesRoute, :toggle_theme
-
-# Users
-
-post "/watch_ajax" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env, "/feed/subscriptions")
-
- redirect = env.params.query["redirect"]?
- redirect ||= "true"
- redirect = redirect == "true"
-
- if !user
- if redirect
- next env.redirect referer
- else
- next error_json(403, "No such user")
- end
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- id = env.params.query["id"]?
- if !id
- env.response.status_code = 400
- next
- end
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- if redirect
- next error_template(400, ex)
- else
- next error_json(400, ex)
- end
- end
-
- if env.params.query["action_mark_watched"]?
- action = "action_mark_watched"
- elsif env.params.query["action_mark_unwatched"]?
- action = "action_mark_unwatched"
- else
- next env.redirect referer
- end
-
- case action
- when "action_mark_watched"
- if !user.watched.includes? id
- PG_DB.exec("UPDATE users SET watched = array_append(watched, $1) WHERE email = $2", id, user.email)
- end
- when "action_mark_unwatched"
- PG_DB.exec("UPDATE users SET watched = array_remove(watched, $1) WHERE email = $2", id, user.email)
- else
- next error_json(400, "Unsupported action #{action}")
- end
-
- if redirect
- env.redirect referer
- else
- env.response.content_type = "application/json"
- "{}"
- end
-end
-
-# /modify_notifications
-# will "ding" all subscriptions.
-# /modify_notifications?receive_all_updates=false&receive_no_updates=false
-# will "unding" all subscriptions.
-get "/modify_notifications" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env, "/")
-
- redirect = env.params.query["redirect"]?
- redirect ||= "false"
- redirect = redirect == "true"
-
- if !user
- if redirect
- next env.redirect referer
- else
- next error_json(403, "No such user")
- end
- end
-
- user = user.as(User)
-
- if !user.password
- channel_req = {} of String => String
-
- channel_req["receive_all_updates"] = env.params.query["receive_all_updates"]? || "true"
- channel_req["receive_no_updates"] = env.params.query["receive_no_updates"]? || ""
- channel_req["receive_post_updates"] = env.params.query["receive_post_updates"]? || "true"
-
- channel_req.reject! { |k, v| v != "true" && v != "false" }
-
- headers = HTTP::Headers.new
- headers["Cookie"] = env.request.headers["Cookie"]
-
- html = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
-
- cookies = HTTP::Cookies.from_headers(headers)
- html.cookies.each do |cookie|
- if {"VISITOR_INFO1_LIVE", "YSC", "SIDCC"}.includes? cookie.name
- if cookies[cookie.name]?
- cookies[cookie.name] = cookie
- else
- cookies << cookie
- end
- end
- end
- headers = cookies.add_request_headers(headers)
-
- if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
- session_token = match["session_token"]
- else
- next env.redirect referer
- end
-
- headers["content-type"] = "application/x-www-form-urlencoded"
- channel_req["session_token"] = session_token
-
- subs = XML.parse_html(html.body)
- subs.xpath_nodes(%q(//a[@class="subscription-title yt-uix-sessionlink"]/@href)).each do |channel|
- channel_id = channel.content.lstrip("/channel/").not_nil!
- channel_req["channel_id"] = channel_id
-
- YT_POOL.client &.post("/subscription_ajax?action_update_subscription_preferences=1", headers, form: channel_req)
- end
- end
-
- if redirect
- env.redirect referer
- else
- env.response.content_type = "application/json"
- "{}"
- end
-end
-
-post "/subscription_ajax" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env, "/")
-
- redirect = env.params.query["redirect"]?
- redirect ||= "true"
- redirect = redirect == "true"
-
- if !user
- if redirect
- next env.redirect referer
- else
- next error_json(403, "No such user")
- end
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- if redirect
- next error_template(400, ex)
- else
- next error_json(400, ex)
- end
- end
-
- if env.params.query["action_create_subscription_to_channel"]?.try &.to_i?.try &.== 1
- action = "action_create_subscription_to_channel"
- elsif env.params.query["action_remove_subscriptions"]?.try &.to_i?.try &.== 1
- action = "action_remove_subscriptions"
- else
- next env.redirect referer
- end
-
- channel_id = env.params.query["c"]?
- channel_id ||= ""
-
- if !user.password
- # Sync subscriptions with YouTube
- subscribe_ajax(channel_id, action, env.request.headers)
- end
- email = user.email
-
- case action
- when "action_create_subscription_to_channel"
- if !user.subscriptions.includes? channel_id
- get_channel(channel_id, PG_DB, false, false)
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
- end
- when "action_remove_subscriptions"
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2", channel_id, email)
- else
- next error_json(400, "Unsupported action #{action}")
- end
-
- if redirect
- env.redirect referer
- else
- env.response.content_type = "application/json"
- "{}"
- end
-end
-
-get "/subscription_manager" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
-
- if !user.password
- # Refresh account
- headers = HTTP::Headers.new
- headers["Cookie"] = env.request.headers["Cookie"]
-
- user, sid = get_user(sid, headers, PG_DB)
- end
-
- action_takeout = env.params.query["action_takeout"]?.try &.to_i?
- action_takeout ||= 0
- action_takeout = action_takeout == 1
-
- format = env.params.query["format"]?
- format ||= "rss"
-
- if user.subscriptions.empty?
- values = "'{}'"
- else
- values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
- end
-
- subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
- subscriptions.sort_by! { |channel| channel.author.downcase }
-
- if action_takeout
- if format == "json"
- env.response.content_type = "application/json"
- env.response.headers["content-disposition"] = "attachment"
- playlists = PG_DB.query_all("SELECT * FROM playlists WHERE author = $1 AND id LIKE 'IV%' ORDER BY created", user.email, as: InvidiousPlaylist)
-
- next JSON.build do |json|
- json.object do
- json.field "subscriptions", user.subscriptions
- json.field "watch_history", user.watched
- json.field "preferences", user.preferences
- json.field "playlists" do
- json.array do
- playlists.each do |playlist|
- json.object do
- json.field "title", playlist.title
- json.field "description", html_to_content(playlist.description_html)
- json.field "privacy", playlist.privacy.to_s
- json.field "videos" do
- json.array do
- PG_DB.query_all("SELECT id FROM playlist_videos WHERE plid = $1 ORDER BY array_position($2, index) LIMIT 500", playlist.id, playlist.index, as: String).each do |video_id|
- json.string video_id
- end
- end
- end
- end
- end
- end
- end
- end
- end
- else
- env.response.content_type = "application/xml"
- env.response.headers["content-disposition"] = "attachment"
- export = XML.build do |xml|
- xml.element("opml", version: "1.1") do
- xml.element("body") do
- if format == "newpipe"
- title = "YouTube Subscriptions"
- else
- title = "Invidious Subscriptions"
- end
-
- xml.element("outline", text: title, title: title) do
- subscriptions.each do |channel|
- if format == "newpipe"
- xmlUrl = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
- else
- xmlUrl = "#{HOST_URL}/feed/channel/#{channel.id}"
- end
-
- xml.element("outline", text: channel.author, title: channel.author,
- "type": "rss", xmlUrl: xmlUrl)
- end
- end
- end
- end
- end
-
- next export.gsub(%(<?xml version="1.0"?>\n), "")
- end
- end
-
- templated "subscription_manager"
-end
-
-get "/data_control" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
-
- templated "data_control"
-end
-
-post "/data_control" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- referer = get_referer(env)
-
- if user
- user = user.as(User)
-
- # TODO: Find a way to prevent browser timeout
-
- HTTP::FormData.parse(env.request) do |part|
- body = part.body.gets_to_end
- next if body.empty?
-
- # TODO: Unify into single import based on content-type
- case part.name
- when "import_invidious"
- body = JSON.parse(body)
-
- if body["subscriptions"]?
- user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
- user.subscriptions.uniq!
-
- user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
-
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
- end
-
- if body["watch_history"]?
- user.watched += body["watch_history"].as_a.map { |a| a.as_s }
- user.watched.uniq!
- PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
- end
-
- if body["preferences"]?
- user.preferences = Preferences.from_json(body["preferences"].to_json)
- PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", user.preferences.to_json, user.email)
- end
-
- if playlists = body["playlists"]?.try &.as_a?
- playlists.each do |item|
- title = item["title"]?.try &.as_s?.try &.delete("<>")
- description = item["description"]?.try &.as_s?.try &.delete("\r")
- privacy = item["privacy"]?.try &.as_s?.try { |privacy| PlaylistPrivacy.parse? privacy }
-
- next if !title
- next if !description
- next if !privacy
-
- playlist = create_playlist(PG_DB, title, privacy, user)
- PG_DB.exec("UPDATE playlists SET description = $1 WHERE id = $2", description, playlist.id)
-
- videos = item["videos"]?.try &.as_a?.try &.each_with_index do |video_id, idx|
- raise InfoException.new("Playlist cannot have more than 500 videos") if idx > 500
-
- video_id = video_id.try &.as_s?
- next if !video_id
-
- begin
- video = get_video(video_id, PG_DB)
- rescue ex
- next
- end
-
- playlist_video = PlaylistVideo.new({
- title: video.title,
- id: video.id,
- author: video.author,
- ucid: video.ucid,
- length_seconds: video.length_seconds,
- published: video.published,
- plid: playlist.id,
- live_now: video.live_now,
- index: Random::Secure.rand(0_i64..Int64::MAX),
- })
-
- video_array = playlist_video.to_a
- args = arg_array(video_array)
-
- PG_DB.exec("INSERT INTO playlist_videos VALUES (#{args})", args: video_array)
- PG_DB.exec("UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3", playlist_video.index, Time.utc, playlist.id)
- end
- end
- end
- when "import_youtube"
- if body[0..4] == "<opml"
- subscriptions = XML.parse(body)
- user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
- channel["xmlUrl"].match(/UC[a-zA-Z0-9_-]{22}/).not_nil![0]
- end
- else
- subscriptions = JSON.parse(body)
- user.subscriptions += subscriptions.as_a.compact_map do |entry|
- entry["snippet"]["resourceId"]["channelId"].as_s
- end
- end
- user.subscriptions.uniq!
-
- user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
-
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
- when "import_freetube"
- user.subscriptions += body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/).map do |md|
- md["channel_id"]
- end
- user.subscriptions.uniq!
-
- user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
-
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
- when "import_newpipe_subscriptions"
- body = JSON.parse(body)
- user.subscriptions += body["subscriptions"].as_a.compact_map do |channel|
- if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
- next match["channel"]
- elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
- response = YT_POOL.client &.get("/user/#{match["user"]}?disable_polymer=1&hl=en&gl=US")
- html = XML.parse_html(response.body)
- ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
- next ucid if ucid
- end
-
- nil
- end
- user.subscriptions.uniq!
-
- user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
-
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
- when "import_newpipe"
- Compress::Zip::Reader.open(IO::Memory.new(body)) do |file|
- file.each_entry do |entry|
- if entry.filename == "newpipe.db"
- tempfile = File.tempfile(".db")
- File.write(tempfile.path, entry.io.gets_to_end)
- db = DB.open("sqlite3://" + tempfile.path)
-
- user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
- user.watched.uniq!
-
- PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
-
- user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
- user.subscriptions.uniq!
-
- user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
-
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
-
- db.close
- tempfile.delete
- end
- end
- end
- else nil # Ignore
- end
- end
- end
-
- env.redirect referer
-end
-
-get "/change_password" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- csrf_token = generate_response(sid, {":change_password"}, HMAC_KEY, PG_DB)
-
- templated "change_password"
-end
-
-post "/change_password" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- # We don't store passwords for Google accounts
- if !user.password
- next error_template(400, "Cannot change password for Google accounts")
- end
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- next error_template(400, ex)
- end
-
- password = env.params.body["password"]?
- if !password
- next error_template(401, "Password is a required field")
- end
-
- new_passwords = env.params.body.select { |k, v| k.match(/^new_password\[\d+\]$/) }.map { |k, v| v }
-
- if new_passwords.size <= 1 || new_passwords.uniq.size != 1
- next error_template(400, "New passwords must match")
- end
-
- new_password = new_passwords.uniq[0]
- if new_password.empty?
- next error_template(401, "Password cannot be empty")
- end
-
- if new_password.bytesize > 55
- next error_template(400, "Password cannot be longer than 55 characters")
- end
-
- if !Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
- next error_template(401, "Incorrect password")
- end
-
- new_password = Crypto::Bcrypt::Password.create(new_password, cost: 10)
- PG_DB.exec("UPDATE users SET password = $1 WHERE email = $2", new_password.to_s, user.email)
-
- env.redirect referer
-end
-
-get "/delete_account" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- csrf_token = generate_response(sid, {":delete_account"}, HMAC_KEY, PG_DB)
-
- templated "delete_account"
-end
-
-post "/delete_account" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- next error_template(400, ex)
- end
-
- view_name = "subscriptions_#{sha256(user.email)}"
- PG_DB.exec("DELETE FROM users * WHERE email = $1", user.email)
- PG_DB.exec("DELETE FROM session_ids * WHERE email = $1", user.email)
- PG_DB.exec("DROP MATERIALIZED VIEW #{view_name}")
-
- env.request.cookies.each do |cookie|
- cookie.expires = Time.utc(1990, 1, 1)
- env.response.cookies << cookie
- end
-
- env.redirect referer
-end
-
-get "/clear_watch_history" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- csrf_token = generate_response(sid, {":clear_watch_history"}, HMAC_KEY, PG_DB)
-
- templated "clear_watch_history"
-end
-
-post "/clear_watch_history" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- next error_template(400, ex)
- end
-
- PG_DB.exec("UPDATE users SET watched = '{}' WHERE email = $1", user.email)
- env.redirect referer
-end
-
-get "/authorize_token" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB)
-
- scopes = env.params.query["scopes"]?.try &.split(",")
- scopes ||= [] of String
-
- callback_url = env.params.query["callback_url"]?
- if callback_url
- callback_url = URI.parse(callback_url)
- end
-
- expire = env.params.query["expire"]?.try &.to_i?
-
- templated "authorize_token"
-end
-
-post "/authorize_token" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = env.get("user").as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- next error_template(400, ex)
- end
-
- scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
- callback_url = env.params.body["callbackUrl"]?
- expire = env.params.body["expire"]?.try &.to_i?
-
- access_token = generate_token(user.email, scopes, expire, HMAC_KEY, PG_DB)
-
- if callback_url
- access_token = URI.encode_www_form(access_token)
- url = URI.parse(callback_url)
-
- if url.query
- query = HTTP::Params.parse(url.query.not_nil!)
- else
- query = HTTP::Params.new
- end
-
- query["token"] = access_token
- url.query = query.to_s
-
- env.redirect url.to_s
- else
- csrf_token = ""
- env.set "access_token", access_token
- templated "authorize_token"
- end
-end
-
-get "/token_manager" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env, "/subscription_manager")
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
-
- tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1 ORDER BY issued DESC", user.email, as: {session: String, issued: Time})
-
- templated "token_manager"
-end
-
-post "/token_ajax" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- redirect = env.params.query["redirect"]?
- redirect ||= "true"
- redirect = redirect == "true"
-
- if !user
- if redirect
- next env.redirect referer
- else
- next error_json(403, "No such user")
- end
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = env.params.body["csrf_token"]?
-
- begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
- rescue ex
- if redirect
- next error_template(400, ex)
- else
- next error_json(400, ex)
- end
- end
-
- if env.params.query["action_revoke_token"]?
- action = "action_revoke_token"
- else
- next env.redirect referer
- end
-
- session = env.params.query["session"]?
- session ||= ""
-
- case action
- when .starts_with? "action_revoke_token"
- PG_DB.exec("DELETE FROM session_ids * WHERE id = $1 AND email = $2", session, user.email)
- else
- next error_json(400, "Unsupported action #{action}")
- end
-
- if redirect
- env.redirect referer
- else
- env.response.content_type = "application/json"
- "{}"
- end
-end
-
-# Feeds
-
-get "/feed/playlists" do |env|
- env.redirect "/view_all_playlists"
-end
-
-get "/feed/top" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- message = translate(locale, "The Top feed has been removed from Invidious.")
- templated "message"
-end
-
-get "/feed/popular" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- if CONFIG.popular_enabled
- templated "popular"
- else
- message = translate(locale, "The Popular feed has been disabled by the administrator.")
- templated "message"
- end
-end
-
-get "/feed/trending" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- trending_type = env.params.query["type"]?
- trending_type ||= "Default"
-
- region = env.params.query["region"]?
- region ||= "US"
-
- begin
- trending, plid = fetch_trending(trending_type, region, locale)
- rescue ex
- next error_template(500, ex)
- end
-
- templated "trending"
-end
-
-get "/feed/subscriptions" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- sid = env.get? "sid"
- referer = get_referer(env)
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
- sid = sid.as(String)
- token = user.token
-
- if user.preferences.unseen_only
- env.set "show_watched", true
- end
-
- # Refresh account
- headers = HTTP::Headers.new
- headers["Cookie"] = env.request.headers["Cookie"]
-
- if !user.password
- user, sid = get_user(sid, headers, PG_DB)
- end
-
- max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
- max_results ||= user.preferences.max_results
- max_results ||= CONFIG.default_user_preferences.max_results
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)
-
- # "updated" here is used for delivering new notifications, so if
- # we know a user has looked at their feed e.g. in the past 10 minutes,
- # they've already seen a video posted 20 minutes ago, and don't need
- # to be notified.
- PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.utc,
- user.email)
- user.notifications = [] of String
- env.set "user", user
-
- templated "subscriptions"
-end
-
-get "/feed/history" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- referer = get_referer(env)
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- if !user
- next env.redirect referer
- end
-
- user = user.as(User)
-
- max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
- max_results ||= user.preferences.max_results
- max_results ||= CONFIG.default_user_preferences.max_results
-
- if user.watched[(page - 1) * max_results]?
- watched = user.watched.reverse[(page - 1) * max_results, max_results]
- end
- watched ||= [] of String
-
- templated "history"
-end
-
-get "/feed/channel/:ucid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/atom+xml"
-
- ucid = env.params.url["ucid"]
-
- params = HTTP::Params.parse(env.params.query["params"]? || "")
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
- rescue ex
- next error_atom(500, ex)
- end
-
- response = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{channel.ucid}")
- rss = XML.parse_html(response.body)
-
- videos = rss.xpath_nodes("//feed/entry").map do |entry|
- video_id = entry.xpath_node("videoid").not_nil!.content
- title = entry.xpath_node("title").not_nil!.content
-
- published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
- updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
-
- author = entry.xpath_node("author/name").not_nil!.content
- ucid = entry.xpath_node("channelid").not_nil!.content
- description_html = entry.xpath_node("group/description").not_nil!.to_s
- views = entry.xpath_node("group/community/statistics").not_nil!.["views"].to_i64
-
- SearchVideo.new({
- title: title,
- id: video_id,
- author: author,
- ucid: ucid,
- published: published,
- views: views,
- description_html: description_html,
- length_seconds: 0,
- live_now: false,
- paid: false,
- premium: false,
- premiere_timestamp: nil,
- })
- end
-
- XML.build(indent: " ", encoding: "UTF-8") do |xml|
- xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
- "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
- "xml:lang": "en-US") do
- xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
- xml.element("id") { xml.text "yt:channel:#{channel.ucid}" }
- xml.element("yt:channelId") { xml.text channel.ucid }
- xml.element("icon") { xml.text channel.author_thumbnail }
- xml.element("title") { xml.text channel.author }
- xml.element("link", rel: "alternate", href: "#{HOST_URL}/channel/#{channel.ucid}")
-
- xml.element("author") do
- xml.element("name") { xml.text channel.author }
- xml.element("uri") { xml.text "#{HOST_URL}/channel/#{channel.ucid}" }
- end
-
- videos.each do |video|
- video.to_xml(channel.auto_generated, params, xml)
- end
- end
- end
-end
-
-get "/feed/private" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/atom+xml"
-
- token = env.params.query["token"]?
-
- if !token
- env.response.status_code = 403
- next
- end
-
- user = PG_DB.query_one?("SELECT * FROM users WHERE token = $1", token.strip, as: User)
- if !user
- env.response.status_code = 403
- next
- end
-
- max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
- max_results ||= user.preferences.max_results
- max_results ||= CONFIG.default_user_preferences.max_results
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- params = HTTP::Params.parse(env.params.query["params"]? || "")
-
- videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)
-
- XML.build(indent: " ", encoding: "UTF-8") do |xml|
- xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
- "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
- "xml:lang": "en-US") do
- xml.element("link", "type": "text/html", rel: "alternate", href: "#{HOST_URL}/feed/subscriptions")
- xml.element("link", "type": "application/atom+xml", rel: "self",
- href: "#{HOST_URL}#{env.request.resource}")
- xml.element("title") { xml.text translate(locale, "Invidious Private Feed for `x`", user.email) }
-
- (notifications + videos).each do |video|
- video.to_xml(locale, params, xml)
- end
- end
- end
-end
-
-get "/feed/playlist/:plid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/atom+xml"
-
- plid = env.params.url["plid"]
-
- params = HTTP::Params.parse(env.params.query["params"]? || "")
- path = env.request.path
-
- if plid.starts_with? "IV"
- if playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- videos = get_playlist_videos(PG_DB, playlist, offset: 0, locale: locale)
-
- next XML.build(indent: " ", encoding: "UTF-8") do |xml|
- xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
- "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
- "xml:lang": "en-US") do
- xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
- xml.element("id") { xml.text "iv:playlist:#{plid}" }
- xml.element("iv:playlistId") { xml.text plid }
- xml.element("title") { xml.text playlist.title }
- xml.element("link", rel: "alternate", href: "#{HOST_URL}/playlist?list=#{plid}")
-
- xml.element("author") do
- xml.element("name") { xml.text playlist.author }
- end
-
- videos.each do |video|
- video.to_xml(false, xml)
- end
- end
- end
- else
- env.response.status_code = 404
- next
- end
- end
-
- response = YT_POOL.client &.get("/feeds/videos.xml?playlist_id=#{plid}")
- document = XML.parse(response.body)
-
- document.xpath_nodes(%q(//*[@href]|//*[@url])).each do |node|
- node.attributes.each do |attribute|
- case attribute.name
- when "url", "href"
- request_target = URI.parse(node[attribute.name]).request_target
- query_string_opt = request_target.starts_with?("/watch?v=") ? "&#{params}" : ""
- node[attribute.name] = "#{HOST_URL}#{request_target}#{query_string_opt}"
- else nil # Skip
- end
- end
- end
-
- document = document.to_xml(options: XML::SaveOptions::NO_DECL)
-
- document.scan(/<uri>(?<url>[^<]+)<\/uri>/).each do |match|
- content = "#{HOST_URL}#{URI.parse(match["url"]).request_target}"
- document = document.gsub(match[0], "<uri>#{content}</uri>")
- end
-
- document
-end
-
-get "/feeds/videos.xml" do |env|
- if ucid = env.params.query["channel_id"]?
- env.redirect "/feed/channel/#{ucid}"
- elsif user = env.params.query["user"]?
- env.redirect "/feed/channel/#{user}"
- elsif plid = env.params.query["playlist_id"]?
- env.redirect "/feed/playlist/#{plid}"
- end
-end
-
-# Support push notifications via PubSubHubbub
-
-get "/feed/webhook/:token" do |env|
- verify_token = env.params.url["token"]
-
- mode = env.params.query["hub.mode"]?
- topic = env.params.query["hub.topic"]?
- challenge = env.params.query["hub.challenge"]?
-
- if !mode || !topic || !challenge
- env.response.status_code = 400
- next
- else
- mode = mode.not_nil!
- topic = topic.not_nil!
- challenge = challenge.not_nil!
- end
-
- case verify_token
- when .starts_with? "v1"
- _, time, nonce, signature = verify_token.split(":")
- data = "#{time}:#{nonce}"
- when .starts_with? "v2"
- time, signature = verify_token.split(":")
- data = "#{time}"
- else
- env.response.status_code = 400
- next
- end
-
- # The hub will sometimes check if we're still subscribed after delivery errors,
- # so we reply with a 200 as long as the request hasn't expired
- if Time.utc.to_unix - time.to_i > 432000
- env.response.status_code = 400
- next
- end
-
- if OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, data) != signature
- env.response.status_code = 400
- next
- end
-
- if ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]?
- PG_DB.exec("UPDATE channels SET subscribed = $1 WHERE id = $2", Time.utc, ucid)
- elsif plid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["playlist_id"]?
-    PG_DB.exec("UPDATE playlists SET subscribed = $1 WHERE id = $2", Time.utc, plid)
- else
- env.response.status_code = 400
- next
- end
-
- env.response.status_code = 200
- challenge
-end
-
-post "/feed/webhook/:token" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- token = env.params.url["token"]
- body = env.request.body.not_nil!.gets_to_end
- signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
-
- if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
- LOGGER.error("/feed/webhook/#{token} : Invalid signature")
- env.response.status_code = 200
- next
- end
-
- spawn do
- rss = XML.parse_html(body)
- rss.xpath_nodes("//feed/entry").each do |entry|
- id = entry.xpath_node("videoid").not_nil!.content
- author = entry.xpath_node("author/name").not_nil!.content
- published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
- updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
-
- video = get_video(id, PG_DB, force_refresh: true)
-
- # Deliver notifications to `/api/v1/auth/notifications`
- payload = {
- "topic" => video.ucid,
- "videoId" => video.id,
- "published" => published.to_unix,
- }.to_json
- PG_DB.exec("NOTIFY notifications, E'#{payload}'")
-
- video = ChannelVideo.new({
- id: id,
- title: video.title,
- published: published,
- updated: updated,
- ucid: video.ucid,
- author: author,
- length_seconds: video.length_seconds,
- live_now: video.live_now,
- premiere_timestamp: video.premiere_timestamp,
- views: video.views,
- })
-
- was_insert = PG_DB.query_one("INSERT INTO channel_videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
- ON CONFLICT (id) DO UPDATE SET title = $2, published = $3,
- updated = $4, ucid = $5, author = $6, length_seconds = $7,
- live_now = $8, premiere_timestamp = $9, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
-
- PG_DB.exec("UPDATE users SET notifications = array_append(notifications, $1),
- feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid) if was_insert
- end
- end
-
- env.response.status_code = 200
- next
-end
-
-# Channels
-
-{"/channel/:ucid/live", "/user/:user/live", "/c/:user/live"}.each do |route|
- get route do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-    # There appears to be a bug in routing: having several routes configured
-    # as `/a/:a`, `/b/:a`, `/c/:a` results in a 404
- value = env.request.resource.split("/")[2]
- body = ""
- {"channel", "user", "c"}.each do |type|
- response = YT_POOL.client &.get("/#{type}/#{value}/live?disable_polymer=1")
- if response.status_code == 200
- body = response.body
- end
- end
-
- video_id = body.match(/'VIDEO_ID': "(?<id>[a-zA-Z0-9_-]{11})"/).try &.["id"]?
- if video_id
- params = [] of String
- env.params.query.each do |k, v|
- params << "#{k}=#{v}"
- end
- params = params.join("&")
-
- url = "/watch?v=#{video_id}"
- if !params.empty?
- url += "&#{params}"
- end
-
- env.redirect url
- else
- env.redirect "/channel/#{value}"
- end
- end
-end
-
-# YouTube appears to let users set a "brand" URL that
-# is different from their username, so we convert that here
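-# e.g. /c/SomeBrandName is resolved to /channel/UC... (illustrative values) by
-# following the canonical <link> on the channel page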
-get "/c/:user" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.params.url["user"]
-
- response = YT_POOL.client &.get("/c/#{user}")
- html = XML.parse_html(response.body)
-
- ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
- next env.redirect "/" if !ucid
-
- env.redirect "/channel/#{ucid}"
-end
-
-# Legacy endpoint for /user/:username
-get "/profile" do |env|
- user = env.params.query["user"]?
- if !user
- env.redirect "/"
- else
- env.redirect "/user/#{user}"
- end
-end
-
-get "/attribution_link" do |env|
- if query = env.params.query["u"]?
- url = URI.parse(query).request_target
- else
- url = "/"
- end
-
- env.redirect url
-end
-
-# Page used by YouTube to provide a captioning widget; since we
-# don't support it, we redirect to '/'
-get "/timedtext_video" do |env|
- env.redirect "/"
-end
-
-get "/user/:user" do |env|
- user = env.params.url["user"]
- env.redirect "/channel/#{user}"
-end
-
-get "/user/:user/videos" do |env|
- user = env.params.url["user"]
- env.redirect "/channel/#{user}/videos"
-end
-
-get "/user/:user/about" do |env|
- user = env.params.url["user"]
- env.redirect "/channel/#{user}"
-end
-
-get "/channel/:ucid/about" do |env|
- ucid = env.params.url["ucid"]
- env.redirect "/channel/#{ucid}"
-end
-
-get "/channel/:ucid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- if user
- user = user.as(User)
- subscriptions = user.subscriptions
- end
- subscriptions ||= [] of String
-
- ucid = env.params.url["ucid"]
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- continuation = env.params.query["continuation"]?
-
- sort_by = env.params.query["sort_by"]?.try &.downcase
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
- rescue ex
- next error_template(500, ex)
- end
-
- if channel.auto_generated
- sort_options = {"last", "oldest", "newest"}
- sort_by ||= "last"
-
- items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
- items.uniq! do |item|
- if item.responds_to?(:title)
- item.title
- elsif item.responds_to?(:author)
- item.author
- end
- end
- items = items.select(&.is_a?(SearchPlaylist)).map(&.as(SearchPlaylist))
- items.each { |item| item.author = "" }
- else
- sort_options = {"newest", "oldest", "popular"}
- sort_by ||= "newest"
-
- count, items = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
- items.reject! &.paid
-
- env.set "search", "channel:#{channel.ucid} "
- end
-
- templated "channel"
-end
-
-get "/channel/:ucid/videos" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- ucid = env.params.url["ucid"]
- params = env.request.query
-
- if !params || params.empty?
- params = ""
- else
- params = "?#{params}"
- end
-
- env.redirect "/channel/#{ucid}#{params}"
-end
-
-get "/channel/:ucid/playlists" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- if user
- user = user.as(User)
- subscriptions = user.subscriptions
- end
- subscriptions ||= [] of String
-
- ucid = env.params.url["ucid"]
-
- continuation = env.params.query["continuation"]?
-
- sort_by = env.params.query["sort_by"]?.try &.downcase
- sort_by ||= "last"
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
- rescue ex
- next error_template(500, ex)
- end
-
- if channel.auto_generated
- next env.redirect "/channel/#{channel.ucid}"
- end
-
- items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
- items = items.select { |item| item.is_a?(SearchPlaylist) }.map { |item| item.as(SearchPlaylist) }
- items.each { |item| item.author = "" }
-
- env.set "search", "channel:#{channel.ucid} "
- templated "playlists"
-end
-
-get "/channel/:ucid/community" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- user = env.get? "user"
- if user
- user = user.as(User)
- subscriptions = user.subscriptions
- end
- subscriptions ||= [] of String
-
- ucid = env.params.url["ucid"]
-
- thin_mode = env.params.query["thin_mode"]? || env.get("preferences").as(Preferences).thin_mode
- thin_mode = thin_mode == "true"
-
- continuation = env.params.query["continuation"]?
- # sort_by = env.params.query["sort_by"]?.try &.downcase
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- next env.redirect env.request.resource.gsub(ucid, ex.channel_id)
- rescue ex
- next error_template(500, ex)
- end
-
- if !channel.tabs.includes? "community"
- next env.redirect "/channel/#{channel.ucid}"
- end
-
- begin
- items = JSON.parse(fetch_channel_community(ucid, continuation, locale, "json", thin_mode))
- rescue ex : InfoException
- env.response.status_code = 500
- error_message = ex.message
- rescue ex
- next error_template(500, ex)
- end
-
- env.set "search", "channel:#{channel.ucid} "
- templated "community"
-end
-
-# API Endpoints
-
-get "/api/v1/stats" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- env.response.content_type = "application/json"
-
- if !CONFIG.statistics_enabled
- next error_json(400, "Statistics are not enabled.")
- end
-
- Invidious::Jobs::StatisticsRefreshJob::STATISTICS.to_json
-end
-
-# YouTube provides "storyboards", which are sprites containing x * y
-# preview thumbnails for individual scenes in a video.
-# See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
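-# The WebVTT generated below maps each tile of the sprite sheet onto a time
-# range using a media-fragment URL, e.g. (illustrative values only):
-#
-#   00:00:00.000 --> 00:00:10.000
-#   https://invidious.example/sb/i/<id>/storyboard3_L2/M0.jpg#xywh=0,0,158,90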
-get "/api/v1/storyboards/:id" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- id = env.params.url["id"]
- region = env.params.query["region"]?
-
- begin
- video = get_video(id, PG_DB, region: region)
- rescue ex : VideoRedirect
- env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
- next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
- rescue ex
- env.response.status_code = 500
- next
- end
-
- storyboards = video.storyboards
- width = env.params.query["width"]?
- height = env.params.query["height"]?
-
- if !width && !height
- response = JSON.build do |json|
- json.object do
- json.field "storyboards" do
- generate_storyboards(json, id, storyboards)
- end
- end
- end
-
- next response
- end
-
- env.response.content_type = "text/vtt"
-
- storyboard = storyboards.select { |storyboard| width == "#{storyboard[:width]}" || height == "#{storyboard[:height]}" }
-
- if storyboard.empty?
- env.response.status_code = 404
- next
- else
- storyboard = storyboard[0]
- end
-
- String.build do |str|
- str << <<-END_VTT
- WEBVTT
-
-
- END_VTT
-
- start_time = 0.milliseconds
- end_time = storyboard[:interval].milliseconds
-
- storyboard[:storyboard_count].times do |i|
- url = storyboard[:url]
- authority = /(i\d?).ytimg.com/.match(url).not_nil![1]?
- url = url.gsub("$M", i).gsub(%r(https://i\d?.ytimg.com/sb/), "")
- url = "#{HOST_URL}/sb/#{authority}/#{url}"
-
- storyboard[:storyboard_height].times do |j|
- storyboard[:storyboard_width].times do |k|
- str << <<-END_CUE
- #{start_time}.000 --> #{end_time}.000
- #{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width] - 2},#{storyboard[:height]}
-
-
- END_CUE
-
- start_time += storyboard[:interval].milliseconds
- end_time += storyboard[:interval].milliseconds
- end
- end
- end
- end
-end
-
-get "/api/v1/captions/:id" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- id = env.params.url["id"]
- region = env.params.query["region"]?
-
- # See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
- # It is possible to use `/api/timedtext?type=list&v=#{id}` and
- # `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
- # but this does not provide links for auto-generated captions.
- #
-  # In the future this should be investigated as an alternative, since it does not require
- # getting video info.
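-  # A minimal sketch of that alternative, using only the endpoints named above
-  # (untested; `lang_code` stands in for whatever code the list response returns):
-  #
-  #   list_xml  = YT_POOL.client &.get("/api/timedtext?type=list&v=#{id}").body
-  #   track_xml = YT_POOL.client &.get("/api/timedtext?type=track&v=#{id}&lang=#{lang_code}").body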
-
- begin
- video = get_video(id, PG_DB, region: region)
- rescue ex : VideoRedirect
- env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
- next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
- rescue ex
- env.response.status_code = 500
- next
- end
-
- captions = video.captions
-
- label = env.params.query["label"]?
- lang = env.params.query["lang"]?
- tlang = env.params.query["tlang"]?
-
- if !label && !lang
- response = JSON.build do |json|
- json.object do
- json.field "captions" do
- json.array do
- captions.each do |caption|
- json.object do
- json.field "label", caption.name.simpleText
- json.field "languageCode", caption.languageCode
- json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name.simpleText)}"
- end
- end
- end
- end
- end
- end
-
- next response
- end
-
- env.response.content_type = "text/vtt; charset=UTF-8"
-
- if lang
- caption = captions.select { |caption| caption.languageCode == lang }
- else
- caption = captions.select { |caption| caption.name.simpleText == label }
- end
-
- if caption.empty?
- env.response.status_code = 404
- next
- else
- caption = caption[0]
- end
-
- url = URI.parse("#{caption.baseUrl}&tlang=#{tlang}").request_target
-
- # Auto-generated captions often have cues that aren't aligned properly with the video,
-  # as well as other markup that makes them cumbersome to use, so we try to fix that here
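-  # e.g. a final node like <text start="7.5" dur="2.5">NAME : hello</text>
-  # (illustrative values) becomes the cue:
-  #
-  #   00:00:07.500 --> 00:00:10.000
-  #   <v NAME>hello</v>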
- if caption.name.simpleText.includes? "auto-generated"
- caption_xml = YT_POOL.client &.get(url).body
- caption_xml = XML.parse(caption_xml)
-
- webvtt = String.build do |str|
- str << <<-END_VTT
- WEBVTT
- Kind: captions
- Language: #{tlang || caption.languageCode}
-
-
- END_VTT
-
- caption_nodes = caption_xml.xpath_nodes("//transcript/text")
- caption_nodes.each_with_index do |node, i|
- start_time = node["start"].to_f.seconds
- duration = node["dur"]?.try &.to_f.seconds
- duration ||= start_time
-
- if caption_nodes.size > i + 1
- end_time = caption_nodes[i + 1]["start"].to_f.seconds
- else
- end_time = start_time + duration
- end
-
- start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
- end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"
-
- text = HTML.unescape(node.content)
- text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
- text = text.gsub(/<\/font>/, "")
- if md = text.match(/(?<name>.*) : (?<text>.*)/)
- text = "<v #{md["name"]}>#{md["text"]}</v>"
- end
-
- str << <<-END_CUE
- #{start_time} --> #{end_time}
- #{text}
-
-
- END_CUE
- end
- end
- else
- webvtt = YT_POOL.client &.get("#{url}&format=vtt").body
- end
-
- if title = env.params.query["title"]?
- # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
- env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.encode_www_form(title)}\"; filename*=UTF-8''#{URI.encode_www_form(title)}"
- end
-
- webvtt
-end
-
-get "/api/v1/comments/:id" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- region = env.params.query["region"]?
-
- env.response.content_type = "application/json"
-
- id = env.params.url["id"]
-
- source = env.params.query["source"]?
- source ||= "youtube"
-
- thin_mode = env.params.query["thin_mode"]?
- thin_mode = thin_mode == "true"
-
- format = env.params.query["format"]?
- format ||= "json"
-
- action = env.params.query["action"]?
- action ||= "action_get_comments"
-
- continuation = env.params.query["continuation"]?
- sort_by = env.params.query["sort_by"]?.try &.downcase
-
- if source == "youtube"
- sort_by ||= "top"
-
- begin
- comments = fetch_youtube_comments(id, PG_DB, continuation, format, locale, thin_mode, region, sort_by: sort_by, action: action)
- rescue ex
- next error_json(500, ex)
- end
-
- next comments
- elsif source == "reddit"
- sort_by ||= "confidence"
-
- begin
- comments, reddit_thread = fetch_reddit_comments(id, sort_by: sort_by)
- content_html = template_reddit_comments(comments, locale)
-
- content_html = fill_links(content_html, "https", "www.reddit.com")
- content_html = replace_links(content_html)
- rescue ex
- comments = nil
- reddit_thread = nil
- content_html = ""
- end
-
- if !reddit_thread || !comments
- env.response.status_code = 404
- next
- end
-
- if format == "json"
- reddit_thread = JSON.parse(reddit_thread.to_json).as_h
- reddit_thread["comments"] = JSON.parse(comments.to_json)
-
- next reddit_thread.to_json
- else
- response = {
- "title" => reddit_thread.title,
- "permalink" => reddit_thread.permalink,
- "contentHtml" => content_html,
- }
-
- next response.to_json
- end
- end
-end
-
-get "/api/v1/insights/:id" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- next error_json(410, "YouTube has removed publicly available analytics.")
-end
-
-get "/api/v1/annotations/:id" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "text/xml"
-
- id = env.params.url["id"]
- source = env.params.query["source"]?
- source ||= "archive"
-
- if !id.match(/[a-zA-Z0-9_-]{11}/)
- env.response.status_code = 400
- next
- end
-
- annotations = ""
-
- case source
- when "archive"
- if CONFIG.cache_annotations && (cached_annotation = PG_DB.query_one?("SELECT * FROM annotations WHERE id = $1", id, as: Annotation))
- annotations = cached_annotation.annotations
- else
- index = CHARS_SAFE.index(id[0]).not_nil!.to_s.rjust(2, '0')
-
- # IA doesn't handle leading hyphens,
- # so we use https://archive.org/details/youtubeannotations_64
- if index == "62"
- index = "64"
- id = id.sub(/^-/, 'A')
- end
-
- file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")
-
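-      # The resulting archive path looks like:
-      #   /download/youtubeannotations_<index>/<first two chars of id>.tar/<first three chars of id>/<id>.xml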
- location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))
-
-      if !location.headers["Location"]?
-        env.response.status_code = location.status_code
-        next
-      end
-
- response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))
-
- if response.body.empty?
- env.response.status_code = 404
- next
- end
-
- if response.status_code != 200
- env.response.status_code = response.status_code
- next
- end
-
- annotations = response.body
-
- cache_annotation(PG_DB, id, annotations)
- end
- else # "youtube"
- response = YT_POOL.client &.get("/annotations_invideo?video_id=#{id}")
-
- if response.status_code != 200
- env.response.status_code = response.status_code
- next
- end
-
- annotations = response.body
- end
-
- etag = sha256(annotations)[0, 16]
- if env.request.headers["If-None-Match"]?.try &.== etag
- env.response.status_code = 304
- else
- env.response.headers["ETag"] = etag
- annotations
- end
-end
-
-get "/api/v1/videos/:id" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- id = env.params.url["id"]
- region = env.params.query["region"]?
-
- begin
- video = get_video(id, PG_DB, region: region)
- rescue ex : VideoRedirect
- env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
- next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
- rescue ex
- next error_json(500, ex)
- end
-
- video.to_json(locale)
-end
-
-get "/api/v1/trending" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- region = env.params.query["region"]?
- trending_type = env.params.query["type"]?
-
- begin
- trending, plid = fetch_trending(trending_type, region, locale)
- rescue ex
- next error_json(500, ex)
- end
-
- videos = JSON.build do |json|
- json.array do
- trending.each do |video|
- video.to_json(locale, json)
- end
- end
- end
-
- videos
-end
-
-get "/api/v1/popular" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- if !CONFIG.popular_enabled
- error_message = {"error" => "Administrator has disabled this endpoint."}.to_json
- env.response.status_code = 400
- next error_message
- end
-
- JSON.build do |json|
- json.array do
- popular_videos.each do |video|
- video.to_json(locale, json)
- end
- end
- end
-end
-
-get "/api/v1/top" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- env.response.status_code = 400
- {"error" => "The Top feed has been removed from Invidious."}.to_json
-end
-
-get "/api/v1/channels/:ucid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- ucid = env.params.url["ucid"]
- sort_by = env.params.query["sort_by"]?.try &.downcase
- sort_by ||= "newest"
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
- next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
- rescue ex
- next error_json(500, ex)
- end
-
- page = 1
- if channel.auto_generated
- videos = [] of SearchVideo
- count = 0
- else
- begin
- count, videos = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
- rescue ex
- next error_json(500, ex)
- end
- end
-
- JSON.build do |json|
- # TODO: Refactor into `to_json` for InvidiousChannel
- json.object do
- json.field "author", channel.author
- json.field "authorId", channel.ucid
- json.field "authorUrl", channel.author_url
-
- json.field "authorBanners" do
- json.array do
- if channel.banner
- qualities = {
- {width: 2560, height: 424},
- {width: 2120, height: 351},
- {width: 1060, height: 175},
- }
- qualities.each do |quality|
- json.object do
- json.field "url", channel.banner.not_nil!.gsub("=w1060-", "=w#{quality[:width]}-")
- json.field "width", quality[:width]
- json.field "height", quality[:height]
- end
- end
-
- json.object do
- json.field "url", channel.banner.not_nil!.split("=w1060-")[0]
- json.field "width", 512
- json.field "height", 288
- end
- end
- end
- end
-
- json.field "authorThumbnails" do
- json.array do
- qualities = {32, 48, 76, 100, 176, 512}
-
- qualities.each do |quality|
- json.object do
- json.field "url", channel.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
- json.field "width", quality
- json.field "height", quality
- end
- end
- end
- end
-
- json.field "subCount", channel.sub_count
- json.field "totalViews", channel.total_views
- json.field "joined", channel.joined.to_unix
- json.field "paid", channel.paid
-
- json.field "autoGenerated", channel.auto_generated
- json.field "isFamilyFriendly", channel.is_family_friendly
- json.field "description", html_to_content(channel.description_html)
- json.field "descriptionHtml", channel.description_html
-
- json.field "allowedRegions", channel.allowed_regions
-
- json.field "latestVideos" do
- json.array do
- videos.each do |video|
- video.to_json(locale, json)
- end
- end
- end
-
- json.field "relatedChannels" do
- json.array do
- channel.related_channels.each do |related_channel|
- json.object do
- json.field "author", related_channel.author
- json.field "authorId", related_channel.ucid
- json.field "authorUrl", related_channel.author_url
-
- json.field "authorThumbnails" do
- json.array do
- qualities = {32, 48, 76, 100, 176, 512}
-
- qualities.each do |quality|
- json.object do
- json.field "url", related_channel.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
- json.field "width", quality
- json.field "height", quality
- end
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
-
-{"/api/v1/channels/:ucid/videos", "/api/v1/channels/videos/:ucid"}.each do |route|
- get route do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- ucid = env.params.url["ucid"]
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
- sort_by = env.params.query["sort"]?.try &.downcase
- sort_by ||= env.params.query["sort_by"]?.try &.downcase
- sort_by ||= "newest"
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
- next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
- rescue ex
- next error_json(500, ex)
- end
-
- begin
- count, videos = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
- rescue ex
- next error_json(500, ex)
- end
-
- JSON.build do |json|
- json.array do
- videos.each do |video|
- video.to_json(locale, json)
- end
- end
- end
- end
-end
-
-{"/api/v1/channels/:ucid/latest", "/api/v1/channels/latest/:ucid"}.each do |route|
- get route do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- ucid = env.params.url["ucid"]
-
- begin
- videos = get_latest_videos(ucid)
- rescue ex
- next error_json(500, ex)
- end
-
- JSON.build do |json|
- json.array do
- videos.each do |video|
- video.to_json(locale, json)
- end
- end
- end
- end
-end
-
-{"/api/v1/channels/:ucid/playlists", "/api/v1/channels/playlists/:ucid"}.each do |route|
- get route do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- ucid = env.params.url["ucid"]
- continuation = env.params.query["continuation"]?
- sort_by = env.params.query["sort"]?.try &.downcase ||
- env.params.query["sort_by"]?.try &.downcase ||
- "last"
-
- begin
- channel = get_about_info(ucid, locale)
- rescue ex : ChannelRedirect
- env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
- next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
- rescue ex
- next error_json(500, ex)
- end
-
- items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
-
- JSON.build do |json|
- json.object do
- json.field "playlists" do
- json.array do
- items.each do |item|
- item.to_json(locale, json) if item.is_a?(SearchPlaylist)
- end
- end
- end
-
- json.field "continuation", continuation
- end
- end
- end
-end
-
-{"/api/v1/channels/:ucid/comments", "/api/v1/channels/comments/:ucid"}.each do |route|
- get route do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- ucid = env.params.url["ucid"]
-
- thin_mode = env.params.query["thin_mode"]?
- thin_mode = thin_mode == "true"
-
- format = env.params.query["format"]?
- format ||= "json"
-
- continuation = env.params.query["continuation"]?
- # sort_by = env.params.query["sort_by"]?.try &.downcase
-
- begin
- fetch_channel_community(ucid, continuation, locale, format, thin_mode)
- rescue ex
- next error_json(500, ex)
- end
- end
-end
-
-get "/api/v1/channels/search/:ucid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- ucid = env.params.url["ucid"]
-
- query = env.params.query["q"]?
- query ||= ""
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- count, search_results = channel_search(query, page, ucid)
- JSON.build do |json|
- json.array do
- search_results.each do |item|
- item.to_json(locale, json)
- end
- end
- end
-end
-
-get "/api/v1/search" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- region = env.params.query["region"]?
-
- env.response.content_type = "application/json"
-
- query = env.params.query["q"]?
- query ||= ""
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- sort_by = env.params.query["sort_by"]?.try &.downcase
- sort_by ||= "relevance"
-
- date = env.params.query["date"]?.try &.downcase
- date ||= ""
-
- duration = env.params.query["duration"]?.try &.downcase
- duration ||= ""
-
- features = env.params.query["features"]?.try &.split(",").map { |feature| feature.downcase }
- features ||= [] of String
-
- content_type = env.params.query["type"]?.try &.downcase
- content_type ||= "video"
-
- begin
- search_params = produce_search_params(page, sort_by, date, content_type, duration, features)
- rescue ex
- next error_json(400, ex)
- end
-
- count, search_results = search(query, search_params, region).as(Tuple)
- JSON.build do |json|
- json.array do
- search_results.each do |item|
- item.to_json(locale, json)
- end
- end
- end
-end
-
-get "/api/v1/search/suggestions" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- region = env.params.query["region"]?
-
- env.response.content_type = "application/json"
-
- query = env.params.query["q"]?
- query ||= ""
-
- begin
- headers = HTTP::Headers{":authority" => "suggestqueries.google.com"}
- response = YT_POOL.client &.get("/complete/search?hl=en&gl=#{region}&client=youtube&ds=yt&q=#{URI.encode_www_form(query)}&callback=suggestCallback", headers).body
-
- body = response[35..-2]
- body = JSON.parse(body).as_a
- suggestions = body[1].as_a[0..-2]
-
- JSON.build do |json|
- json.object do
- json.field "query", body[0].as_s
- json.field "suggestions" do
- json.array do
- suggestions.each do |suggestion|
- json.string suggestion[0].as_s
- end
- end
- end
- end
- end
- rescue ex
- next error_json(500, ex)
- end
-end
-
-{"/api/v1/playlists/:plid", "/api/v1/auth/playlists/:plid"}.each do |route|
- get route do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- plid = env.params.url["plid"]
-
- offset = env.params.query["index"]?.try &.to_i?
- offset ||= env.params.query["page"]?.try &.to_i?.try { |page| (page - 1) * 100 }
- offset ||= 0
-
- continuation = env.params.query["continuation"]?
-
- format = env.params.query["format"]?
- format ||= "json"
-
- if plid.starts_with? "RD"
- next env.redirect "/api/v1/mixes/#{plid}"
- end
-
- begin
- playlist = get_playlist(PG_DB, plid, locale)
- rescue ex : InfoException
- next error_json(404, ex)
- rescue ex
- next error_json(404, "Playlist does not exist.")
- end
-
- user = env.get?("user").try &.as(User)
- if !playlist || playlist.privacy.private? && playlist.author != user.try &.email
- next error_json(404, "Playlist does not exist.")
- end
-
- response = playlist.to_json(offset, locale, continuation: continuation)
-
- if format == "html"
- response = JSON.parse(response)
- playlist_html = template_playlist(response)
- index, next_video = response["videos"].as_a.skip(1).select { |video| !video["author"].as_s.empty? }[0]?.try { |v| {v["index"], v["videoId"]} } || {nil, nil}
-
- response = {
- "playlistHtml" => playlist_html,
- "index" => index,
- "nextVideo" => next_video,
- }.to_json
- end
-
- response
- end
-end
-
-get "/api/v1/mixes/:rdid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
-
- rdid = env.params.url["rdid"]
-
- continuation = env.params.query["continuation"]?
- continuation ||= rdid.lchop("RD")[0, 11]
-
- format = env.params.query["format"]?
- format ||= "json"
-
- begin
- mix = fetch_mix(rdid, continuation, locale: locale)
-
- if !rdid.ends_with? continuation
- mix = fetch_mix(rdid, mix.videos[1].id)
- index = mix.videos.index(mix.videos.select { |video| video.id == continuation }[0]?)
- end
-
- mix.videos = mix.videos[index..-1]
- rescue ex
- next error_json(500, ex)
- end
-
- response = JSON.build do |json|
- json.object do
- json.field "title", mix.title
- json.field "mixId", mix.id
-
- json.field "videos" do
- json.array do
- mix.videos.each do |video|
- json.object do
- json.field "title", video.title
- json.field "videoId", video.id
- json.field "author", video.author
-
- json.field "authorId", video.ucid
- json.field "authorUrl", "/channel/#{video.ucid}"
-
- json.field "videoThumbnails" do
- json.array do
- generate_thumbnails(json, video.id)
- end
- end
-
- json.field "index", video.index
- json.field "lengthSeconds", video.length_seconds
- end
- end
- end
- end
- end
- end
-
- if format == "html"
- response = JSON.parse(response)
- playlist_html = template_mix(response)
- next_video = response["videos"].as_a.select { |video| !video["author"].as_s.empty? }[0]?.try &.["videoId"]
-
- response = {
- "playlistHtml" => playlist_html,
- "nextVideo" => next_video,
- }.to_json
- end
-
- response
-end
-
-# Authenticated endpoints
-
-get "/api/v1/auth/notifications" do |env|
- env.response.content_type = "text/event-stream"
-
- topics = env.params.query["topics"]?.try &.split(",").uniq.first(1000)
- topics ||= [] of String
-
- create_notification_stream(env, topics, connection_channel)
-end
-
-post "/api/v1/auth/notifications" do |env|
- env.response.content_type = "text/event-stream"
-
- topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
- topics ||= [] of String
-
- create_notification_stream(env, topics, connection_channel)
-end
-
-get "/api/v1/auth/preferences" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
- user.preferences.to_json
-end
-
-post "/api/v1/auth/preferences" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- begin
- preferences = Preferences.from_json(env.request.body || "{}")
- rescue
- preferences = user.preferences
- end
-
- PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
-
- env.response.status_code = 204
-end
-
-get "/api/v1/auth/feed" do |env|
- env.response.content_type = "application/json"
-
- user = env.get("user").as(User)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- max_results = env.params.query["max_results"]?.try &.to_i?
- max_results ||= user.preferences.max_results
- max_results ||= CONFIG.default_user_preferences.max_results
-
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
-
- videos, notifications = get_subscription_feed(PG_DB, user, max_results, page)
-
- JSON.build do |json|
- json.object do
- json.field "notifications" do
- json.array do
- notifications.each do |video|
- video.to_json(locale, json)
- end
- end
- end
-
- json.field "videos" do
- json.array do
- videos.each do |video|
- video.to_json(locale, json)
- end
- end
- end
- end
- end
-end
-
-get "/api/v1/auth/subscriptions" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- if user.subscriptions.empty?
- values = "'{}'"
- else
- values = "VALUES #{user.subscriptions.map { |id| %(('#{id}')) }.join(",")}"
- end
-
- subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
-
- JSON.build do |json|
- json.array do
- subscriptions.each do |subscription|
- json.object do
- json.field "author", subscription.author
- json.field "authorId", subscription.id
- end
- end
- end
- end
-end
-
-post "/api/v1/auth/subscriptions/:ucid" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- ucid = env.params.url["ucid"]
-
- if !user.subscriptions.includes? ucid
- get_channel(ucid, PG_DB, false, false)
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
- end
-
- # For Google accounts, access tokens don't have enough information to
- # make a request on the user's behalf, which is why we don't sync with
- # YouTube.
-
- env.response.status_code = 204
-end
-
-delete "/api/v1/auth/subscriptions/:ucid" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- ucid = env.params.url["ucid"]
-
- PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2", ucid, user.email)
-
- env.response.status_code = 204
-end
-
-get "/api/v1/auth/playlists" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- playlists = PG_DB.query_all("SELECT * FROM playlists WHERE author = $1", user.email, as: InvidiousPlaylist)
-
- JSON.build do |json|
- json.array do
- playlists.each do |playlist|
- playlist.to_json(0, locale, json)
- end
- end
- end
-end
-
-post "/api/v1/auth/playlists" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- title = env.params.json["title"]?.try &.as(String).delete("<>").byte_slice(0, 150)
- if !title
- next error_json(400, "Invalid title.")
- end
-
- privacy = env.params.json["privacy"]?.try { |privacy| PlaylistPrivacy.parse(privacy.as(String).downcase) }
- if !privacy
- next error_json(400, "Invalid privacy setting.")
- end
-
- if PG_DB.query_one("SELECT count(*) FROM playlists WHERE author = $1", user.email, as: Int64) >= 100
- next error_json(400, "User cannot have more than 100 playlists.")
- end
-
- playlist = create_playlist(PG_DB, title, privacy, user)
- env.response.headers["Location"] = "#{HOST_URL}/api/v1/auth/playlists/#{playlist.id}"
- env.response.status_code = 201
- {
- "title" => title,
- "playlistId" => playlist.id,
- }.to_json
-end
-
-patch "/api/v1/auth/playlists/:plid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- plid = env.params.url["plid"]
-
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- if !playlist || playlist.author != user.email && playlist.privacy.private?
- next error_json(404, "Playlist does not exist.")
- end
-
- if playlist.author != user.email
- next error_json(403, "Invalid user")
- end
-
-  title = env.params.json["title"]?.try &.as(String).delete("<>").byte_slice(0, 150) || playlist.title
- privacy = env.params.json["privacy"]?.try { |privacy| PlaylistPrivacy.parse(privacy.as(String).downcase) } || playlist.privacy
- description = env.params.json["description"]?.try &.as(String).delete("\r") || playlist.description
-
- if title != playlist.title ||
- privacy != playlist.privacy ||
- description != playlist.description
- updated = Time.utc
- else
- updated = playlist.updated
- end
-
- PG_DB.exec("UPDATE playlists SET title = $1, privacy = $2, description = $3, updated = $4 WHERE id = $5", title, privacy, description, updated, plid)
- env.response.status_code = 204
-end
-
-delete "/api/v1/auth/playlists/:plid" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- plid = env.params.url["plid"]
-
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- if !playlist || playlist.author != user.email && playlist.privacy.private?
- next error_json(404, "Playlist does not exist.")
- end
-
- if playlist.author != user.email
- next error_json(403, "Invalid user")
- end
-
- PG_DB.exec("DELETE FROM playlist_videos * WHERE plid = $1", plid)
- PG_DB.exec("DELETE FROM playlists * WHERE id = $1", plid)
+# Routing
- env.response.status_code = 204
-end
-
-post "/api/v1/auth/playlists/:plid/videos" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- plid = env.params.url["plid"]
-
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- if !playlist || playlist.author != user.email && playlist.privacy.private?
- next error_json(404, "Playlist does not exist.")
- end
-
- if playlist.author != user.email
- next error_json(403, "Invalid user")
- end
-
- if playlist.index.size >= 500
- next error_json(400, "Playlist cannot have more than 500 videos")
- end
-
-  video_id = env.params.json["videoId"]?.try &.as(String)
- if !video_id
- next error_json(403, "Invalid videoId")
- end
-
- begin
- video = get_video(video_id, PG_DB)
- rescue ex
- next error_json(500, ex)
- end
-
- playlist_video = PlaylistVideo.new({
- title: video.title,
- id: video.id,
- author: video.author,
- ucid: video.ucid,
- length_seconds: video.length_seconds,
- published: video.published,
- plid: plid,
- live_now: video.live_now,
- index: Random::Secure.rand(0_i64..Int64::MAX),
- })
-
- video_array = playlist_video.to_a
- args = arg_array(video_array)
-
- PG_DB.exec("INSERT INTO playlist_videos VALUES (#{args})", args: video_array)
- PG_DB.exec("UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3", playlist_video.index, Time.utc, plid)
-
- env.response.headers["Location"] = "#{HOST_URL}/api/v1/auth/playlists/#{plid}/videos/#{playlist_video.index.to_u64.to_s(16).upcase}"
- env.response.status_code = 201
- playlist_video.to_json(locale, index: playlist.index.size)
-end
-
-delete "/api/v1/auth/playlists/:plid/videos/:index" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
-
- plid = env.params.url["plid"]
- index = env.params.url["index"].to_i64(16)
-
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- if !playlist || playlist.author != user.email && playlist.privacy.private?
- next error_json(404, "Playlist does not exist.")
- end
-
- if playlist.author != user.email
- next error_json(403, "Invalid user")
- end
-
- if !playlist.index.includes? index
- next error_json(404, "Playlist does not contain index")
- end
-
- PG_DB.exec("DELETE FROM playlist_videos * WHERE index = $1", index)
- PG_DB.exec("UPDATE playlists SET index = array_remove(index, $1), video_count = cardinality(index) - 1, updated = $2 WHERE id = $3", index, Time.utc, plid)
-
- env.response.status_code = 204
-end
-
-# patch "/api/v1/auth/playlists/:plid/videos/:index" do |env|
-# TODO: Playlist stub
-# end
-
-get "/api/v1/auth/tokens" do |env|
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
- scopes = env.get("scopes").as(Array(String))
-
- tokens = PG_DB.query_all("SELECT id, issued FROM session_ids WHERE email = $1", user.email, as: {session: String, issued: Time})
-
- JSON.build do |json|
- json.array do
- tokens.each do |token|
- json.object do
- json.field "session", token[:session]
- json.field "issued", token[:issued].to_unix
- end
- end
- end
- end
-end
-
-post "/api/v1/auth/tokens/register" do |env|
- user = env.get("user").as(User)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
- case env.request.headers["Content-Type"]?
- when "application/x-www-form-urlencoded"
- scopes = env.params.body.select { |k, v| k.match(/^scopes\[\d+\]$/) }.map { |k, v| v }
- callback_url = env.params.body["callbackUrl"]?
- expire = env.params.body["expire"]?.try &.to_i?
- when "application/json"
- scopes = env.params.json["scopes"].as(Array).map { |v| v.as_s }
- callback_url = env.params.json["callbackUrl"]?.try &.as(String)
- expire = env.params.json["expire"]?.try &.as(Int64)
- else
- next error_json(400, "Invalid or missing header 'Content-Type'")
- end
-
- if callback_url && callback_url.empty?
- callback_url = nil
- end
-
- if callback_url
- callback_url = URI.parse(callback_url)
- end
-
- if sid = env.get?("sid").try &.as(String)
- env.response.content_type = "text/html"
-
- csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, PG_DB, use_nonce: true)
- next templated "authorize_token"
- else
- env.response.content_type = "application/json"
-
- superset_scopes = env.get("scopes").as(Array(String))
-
- authorized_scopes = [] of String
- scopes.each do |scope|
- if scopes_include_scope(superset_scopes, scope)
- authorized_scopes << scope
- end
- end
-
- access_token = generate_token(user.email, authorized_scopes, expire, HMAC_KEY, PG_DB)
-
- if callback_url
- access_token = URI.encode_www_form(access_token)
-
- if query = callback_url.query
- query = HTTP::Params.parse(query.not_nil!)
- else
- query = HTTP::Params.new
- end
-
- query["token"] = access_token
- callback_url.query = query.to_s
-
- env.redirect callback_url.to_s
- else
- access_token
- end
- end
-end
-
-post "/api/v1/auth/tokens/unregister" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- env.response.content_type = "application/json"
- user = env.get("user").as(User)
- scopes = env.get("scopes").as(Array(String))
-
- session = env.params.json["session"]?.try &.as(String)
- session ||= env.get("session").as(String)
-
-  # Allow tokens to revoke other tokens with the correct scope
- if session == env.get("session").as(String)
- PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", session)
- elsif scopes_include_scope(scopes, "GET:tokens")
- PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", session)
- else
- next error_json(400, "Cannot revoke session #{session}")
- end
-
- env.response.status_code = 204
-end
-
-get "/api/manifest/dash/id/videoplayback" do |env|
- env.response.headers.delete("Content-Type")
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.redirect "/videoplayback?#{env.params.query}"
-end
-
-get "/api/manifest/dash/id/videoplayback/*" do |env|
- env.response.headers.delete("Content-Type")
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.redirect env.request.path.lchop("/api/manifest/dash/id")
-end
-
-get "/api/manifest/dash/id/:id" do |env|
- env.response.headers.add("Access-Control-Allow-Origin", "*")
- env.response.content_type = "application/dash+xml"
-
- local = env.params.query["local"]?.try &.== "true"
- id = env.params.url["id"]
- region = env.params.query["region"]?
-
-  # Since some implementations create playlists based on resolution alone, regardless of codec,
-  # we can opt to add a source to an adaptation set only if its height is unique within that set
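-  # For instance, with unique_res enabled, two video/mp4 streams that both map
-  # to a height of 1080 yield a single 1080p Representation in that AdaptationSet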
- unique_res = env.params.query["unique_res"]?.try { |q| (q == "true" || q == "1").to_unsafe }
-
- begin
- video = get_video(id, PG_DB, region: region)
- rescue ex : VideoRedirect
- next env.redirect env.request.resource.gsub(id, ex.video_id)
- rescue ex
- env.response.status_code = 403
- next
- end
-
- if dashmpd = video.dash_manifest_url
- manifest = YT_POOL.client &.get(URI.parse(dashmpd).request_target).body
-
- manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|
- url = baseurl.lchop("<BaseURL>")
- url = url.rchop("</BaseURL>")
-
- if local
- uri = URI.parse(url)
- url = "#{uri.request_target}host/#{uri.host}/"
- end
-
- "<BaseURL>#{url}</BaseURL>"
- end
-
- next manifest
- end
-
- adaptive_fmts = video.adaptive_fmts
-
- if local
- adaptive_fmts.each do |fmt|
- fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).request_target)
- end
- end
-
- audio_streams = video.audio_streams
- video_streams = video.video_streams.sort_by { |stream| {stream["width"].as_i, stream["fps"].as_i} }.reverse
-
- XML.build(indent: " ", encoding: "UTF-8") do |xml|
- xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",
- "profiles": "urn:mpeg:dash:profile:full:2011", minBufferTime: "PT1.5S", type: "static",
- mediaPresentationDuration: "PT#{video.length_seconds}S") do
- xml.element("Period") do
- i = 0
-
- {"audio/mp4", "audio/webm"}.each do |mime_type|
- mime_streams = audio_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
- next if mime_streams.empty?
-
- xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true) do
- mime_streams.each do |fmt|
- codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
- bandwidth = fmt["bitrate"].as_i
- itag = fmt["itag"].as_i
- url = fmt["url"].as_s
-
- xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
- xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011",
- value: "2")
- xml.element("BaseURL") { xml.text url }
- xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
- xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
- end
- end
- end
- end
-
- i += 1
- end
-
- potential_heights = {4320, 2160, 1440, 1080, 720, 480, 360, 240, 144}
-
- {"video/mp4", "video/webm"}.each do |mime_type|
- mime_streams = video_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
- next if mime_streams.empty?
-
- heights = [] of Int32
- xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, scanType: "progressive") do
- mime_streams.each do |fmt|
- codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
- bandwidth = fmt["bitrate"].as_i
- itag = fmt["itag"].as_i
- url = fmt["url"].as_s
- width = fmt["width"].as_i
- height = fmt["height"].as_i
-
- # Resolutions reported by YouTube player (may not accurately reflect source)
- height = potential_heights.min_by { |i| (height - i).abs }
- next if unique_res && heights.includes? height
- heights << height
-
- xml.element("Representation", id: itag, codecs: codecs, width: width, height: height,
- startWithSAP: "1", maxPlayoutRate: "1",
- bandwidth: bandwidth, frameRate: fmt["fps"]) do
- xml.element("BaseURL") { xml.text url }
- xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
- xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
- end
- end
- end
- end
-
- i += 1
- end
- end
- end
- end
-end
-
-get "/api/manifest/hls_variant/*" do |env|
- response = YT_POOL.client &.get(env.request.path)
-
- if response.status_code != 200
- env.response.status_code = response.status_code
- next
- end
-
- local = env.params.query["local"]?.try &.== "true"
-
- env.response.content_type = "application/x-mpegURL"
- env.response.headers.add("Access-Control-Allow-Origin", "*")
-
- manifest = response.body
-
- if local
- manifest = manifest.gsub("https://www.youtube.com", HOST_URL)
- manifest = manifest.gsub("index.m3u8", "index.m3u8?local=true")
- end
-
- manifest
-end
-
-get "/api/manifest/hls_playlist/*" do |env|
- response = YT_POOL.client &.get(env.request.path)
-
- if response.status_code != 200
- env.response.status_code = response.status_code
- next
- end
-
- local = env.params.query["local"]?.try &.== "true"
-
- env.response.content_type = "application/x-mpegURL"
- env.response.headers.add("Access-Control-Allow-Origin", "*")
-
- manifest = response.body
-
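-  # Rewrite each absolute googlevideo segment URL in the playlist into a local
-  # /videoplayback?... URL, using the same path-to-params conversion as the
-  # /videoplayback/* route further down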
- if local
- manifest = manifest.gsub(/^https:\/\/r\d---.{11}\.c\.youtube\.com[^\n]*/m) do |match|
- path = URI.parse(match).path
-
- path = path.lchop("/videoplayback/")
- path = path.rchop("/")
-
- path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
- mimetype = mimetype.split("/")
- mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
- end
-
- path = path.split("/")
-
- raw_params = {} of String => Array(String)
- path.each_slice(2) do |pair|
- key, value = pair
- value = URI.decode_www_form(value)
-
- if raw_params[key]?
- raw_params[key] << value
- else
- raw_params[key] = [value]
- end
- end
-
- raw_params = HTTP::Params.new(raw_params)
- if fvip = raw_params["hls_chunk_host"].match(/r(?<fvip>\d+)---/)
- raw_params["fvip"] = fvip["fvip"]
- end
-
- raw_params["local"] = "true"
-
- "#{HOST_URL}/videoplayback?#{raw_params}"
- end
- end
-
- manifest
-end
-
-# YouTube /videoplayback links expire after 6 hours,
-# so we have a mechanism here to redirect to the latest version
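-# e.g. GET /latest_version?id=<video_id>&itag=22&local=true looks up the current
-# stream URL for that itag and redirects to it, proxied through this instance
-# when local=true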
-get "/latest_version" do |env|
- if env.params.query["download_widget"]?
- download_widget = JSON.parse(env.params.query["download_widget"])
-
- id = download_widget["id"].as_s
- title = download_widget["title"].as_s
-
- if label = download_widget["label"]?
- env.redirect "/api/v1/captions/#{id}?label=#{label}&title=#{title}"
- next
- else
- itag = download_widget["itag"].as_s.to_i
- local = "true"
- end
- end
-
- id ||= env.params.query["id"]?
- itag ||= env.params.query["itag"]?.try &.to_i
-
- region = env.params.query["region"]?
-
- local ||= env.params.query["local"]?
- local ||= "false"
- local = local == "true"
-
- if !id || !itag
- env.response.status_code = 400
- next
- end
-
- video = get_video(id, PG_DB, region: region)
-
- fmt = video.fmt_stream.find(nil) { |f| f["itag"].as_i == itag } || video.adaptive_fmts.find(nil) { |f| f["itag"].as_i == itag }
- url = fmt.try &.["url"]?.try &.as_s
-
- if !url
- env.response.status_code = 404
- next
- end
-
- url = URI.parse(url).request_target.not_nil! if local
- url = "#{url}&title=#{title}" if title
-
- env.redirect url
-end
-
-options "/videoplayback" do |env|
- env.response.headers.delete("Content-Type")
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
- env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
-end
-
-options "/videoplayback/*" do |env|
- env.response.headers.delete("Content-Type")
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
- env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
-end
-
-options "/api/manifest/dash/id/videoplayback" do |env|
- env.response.headers.delete("Content-Type")
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
- env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
-end
-
-options "/api/manifest/dash/id/videoplayback/*" do |env|
- env.response.headers.delete("Content-Type")
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
- env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
-end
-
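-# Rewrites path-style playback URLs into query-style ones, e.g. (illustrative values)
-#   /videoplayback/id/abc123/itag/22/mime/video/mp4
-# redirects to
-#   /videoplayback?id=abc123&itag=22&mime=video%2Fmp4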
-get "/videoplayback/*" do |env|
- path = env.request.path
-
- path = path.lchop("/videoplayback/")
- path = path.rchop("/")
-
- path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
- mimetype = mimetype.split("/")
- mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
- end
-
- path = path.split("/")
-
- raw_params = {} of String => Array(String)
- path.each_slice(2) do |pair|
- key, value = pair
- value = URI.decode_www_form(value)
-
- if raw_params[key]?
- raw_params[key] << value
- else
- raw_params[key] = [value]
- end
- end
-
- query_params = HTTP::Params.new(raw_params)
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.redirect "/videoplayback?#{query_params}"
-end
-
-get "/videoplayback" do |env|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- query_params = env.params.query
-
- fvip = query_params["fvip"]? || "3"
- mns = query_params["mn"]?.try &.split(",")
- mns ||= [] of String
-
- if query_params["region"]?
- region = query_params["region"]
- query_params.delete("region")
- end
-
- if query_params["host"]? && !query_params["host"].empty?
- host = "https://#{query_params["host"]}"
- query_params.delete("host")
- else
- host = "https://r#{fvip}---#{mns.pop}.googlevideo.com"
- end
-
- url = "/videoplayback?#{query_params.to_s}"
-
- headers = HTTP::Headers.new
- REQUEST_HEADERS_WHITELIST.each do |header|
- if env.request.headers[header]?
- headers[header] = env.request.headers[header]
- end
- end
-
- client = make_client(URI.parse(host), region)
- response = HTTP::Client::Response.new(500)
- error = ""
- 5.times do
- begin
- response = client.head(url, headers)
-
- if response.headers["Location"]?
- location = URI.parse(response.headers["Location"])
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- new_host = "#{location.scheme}://#{location.host}"
- if new_host != host
- host = new_host
- client.close
- client = make_client(URI.parse(new_host), region)
- end
-
- url = "#{location.request_target}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
- else
- break
- end
- rescue Socket::Addrinfo::Error
- if !mns.empty?
- mn = mns.pop
- end
- fvip = "3"
-
- host = "https://r#{fvip}---#{mn}.googlevideo.com"
- client = make_client(URI.parse(host), region)
- rescue ex
- error = ex.message
- end
- end
-
- if response.status_code >= 400
- env.response.status_code = response.status_code
- env.response.content_type = "text/plain"
- next error
- end
-
- if url.includes? "&file=seg.ts"
- if CONFIG.disabled?("livestreams")
- next error_template(403, "Administrator has disabled this endpoint.")
- end
-
- begin
- client.get(url, headers) do |response|
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if location = response.headers["Location"]?
- location = URI.parse(location)
- location = "#{location.request_target}&host=#{location.host}"
-
- if region
- location += "&region=#{region}"
- end
-
- next env.redirect location
- end
-
- IO.copy(response.body_io, env.response)
- end
- rescue ex
- end
- else
- if query_params["title"]? && CONFIG.disabled?("downloads") ||
- CONFIG.disabled?("dash")
- next error_template(403, "Administrator has disabled this endpoint.")
- end
-
- content_length = nil
- first_chunk = true
- range_start, range_end = parse_range(env.request.headers["Range"]?)
- chunk_start = range_start
- chunk_end = range_end
-
- if !chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE
- chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
- end
-
- # TODO: Record bytes written so we can restart after a chunk fails
- while true
- if !range_end && content_length
- range_end = content_length
- end
-
- if range_end && chunk_start > range_end
- break
- end
-
- if range_end && chunk_end > range_end
- chunk_end = range_end
- end
-
- headers["Range"] = "bytes=#{chunk_start}-#{chunk_end}"
-
- begin
- client.get(url, headers) do |response|
- if first_chunk
- if !env.request.headers["Range"]? && response.status_code == 206
- env.response.status_code = 200
- else
- env.response.status_code = response.status_code
- end
-
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase) && key.downcase != "content-range"
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if location = response.headers["Location"]?
- location = URI.parse(location)
- location = "#{location.request_target}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
-
- env.redirect location
- break
- end
-
- if title = query_params["title"]?
- # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
- env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.encode_www_form(title)}\"; filename*=UTF-8''#{URI.encode_www_form(title)}"
- end
-
- if !response.headers.includes_word?("Transfer-Encoding", "chunked")
- content_length = response.headers["Content-Range"].split("/")[-1].to_i64
- if env.request.headers["Range"]?
- env.response.headers["Content-Range"] = "bytes #{range_start}-#{range_end || (content_length - 1)}/#{content_length}"
- env.response.content_length = ((range_end.try &.+ 1) || content_length) - range_start
- else
- env.response.content_length = content_length
- end
- end
- end
-
- proxy_file(response, env)
- end
- rescue ex
- if ex.message != "Error reading socket: Connection reset by peer"
- break
- else
- client.close
- client = make_client(URI.parse(host), region)
- end
- end
-
- chunk_start = chunk_end + 1
- chunk_end += HTTP_CHUNK_SIZE
- first_chunk = false
- end
- end
- client.close
-end
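For the non-livestream case, this route proxies the file in fixed-size chunks by issuing successive Range requests upstream. Purely as an illustration of the arithmetic (HTTP_CHUNK_SIZE is a constant defined elsewhere in the codebase; 10 MiB = 10485760 bytes is assumed here only to make the numbers concrete), an open-ended range starting at byte 0 would be fetched as

    Range: bytes=0-10485759
    Range: bytes=10485760-20971519
    ...

while a client range of bytes=0-15000000 would be split into bytes=0-10485759 followed by bytes=10485760-15000000. The headers of the first upstream response are used to set Content-Range and Content-Length for the client.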
-
-get "/ggpht/*" do |env|
- url = env.request.path.lchop("/ggpht")
-
- headers = HTTP::Headers{":authority" => "yt3.ggpht.com"}
- REQUEST_HEADERS_WHITELIST.each do |header|
- if env.request.headers[header]?
- headers[header] = env.request.headers[header]
- end
- end
-
- begin
- YT_POOL.client &.get(url, headers) do |response|
- env.response.status_code = response.status_code
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if response.status_code >= 300
- env.response.headers.delete("Transfer-Encoding")
- break
- end
-
- proxy_file(response, env)
- end
- rescue ex
- end
-end
-
-options "/sb/:authority/:id/:storyboard/:index" do |env|
- env.response.headers["Access-Control-Allow-Origin"] = "*"
- env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
- env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
-end
-
-get "/sb/:authority/:id/:storyboard/:index" do |env|
- authority = env.params.url["authority"]
- id = env.params.url["id"]
- storyboard = env.params.url["storyboard"]
- index = env.params.url["index"]
-
- url = "/sb/#{id}/#{storyboard}/#{index}?#{env.params.query}"
-
- headers = HTTP::Headers.new
-
- headers[":authority"] = "#{authority}.ytimg.com"
-
- REQUEST_HEADERS_WHITELIST.each do |header|
- if env.request.headers[header]?
- headers[header] = env.request.headers[header]
- end
- end
-
- begin
- YT_POOL.client &.get(url, headers) do |response|
- env.response.status_code = response.status_code
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Connection"] = "close"
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if response.status_code >= 300
- env.response.headers.delete("Transfer-Encoding")
- break
- end
-
- proxy_file(response, env)
- end
- rescue ex
- end
-end
-
-get "/s_p/:id/:name" do |env|
- id = env.params.url["id"]
- name = env.params.url["name"]
-
- url = env.request.resource
-
- headers = HTTP::Headers{":authority" => "i9.ytimg.com"}
- REQUEST_HEADERS_WHITELIST.each do |header|
- if env.request.headers[header]?
- headers[header] = env.request.headers[header]
- end
- end
-
- begin
- YT_POOL.client &.get(url, headers) do |response|
- env.response.status_code = response.status_code
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if response.status_code >= 300 && response.status_code != 404
- env.response.headers.delete("Transfer-Encoding")
- break
- end
-
- proxy_file(response, env)
- end
- rescue ex
- end
-end
-
-get "/yts/img/:name" do |env|
- headers = HTTP::Headers.new
- REQUEST_HEADERS_WHITELIST.each do |header|
- if env.request.headers[header]?
- headers[header] = env.request.headers[header]
- end
- end
-
- begin
- YT_POOL.client &.get(env.request.resource, headers) do |response|
- env.response.status_code = response.status_code
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if response.status_code >= 300 && response.status_code != 404
- env.response.headers.delete("Transfer-Encoding")
- break
- end
-
- proxy_file(response, env)
- end
- rescue ex
- end
-end
-
-get "/vi/:id/:name" do |env|
- id = env.params.url["id"]
- name = env.params.url["name"]
-
- headers = HTTP::Headers{":authority" => "i.ytimg.com"}
-
- if name == "maxres.jpg"
- build_thumbnails(id).each do |thumb|
- if YT_POOL.client &.head("/vi/#{id}/#{thumb[:url]}.jpg", headers).status_code == 200
- name = thumb[:url] + ".jpg"
- break
- end
- end
- end
- url = "/vi/#{id}/#{name}"
-
- REQUEST_HEADERS_WHITELIST.each do |header|
- if env.request.headers[header]?
- headers[header] = env.request.headers[header]
- end
- end
-
- begin
- YT_POOL.client &.get(url, headers) do |response|
- env.response.status_code = response.status_code
- response.headers.each do |key, value|
- if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
- env.response.headers[key] = value
- end
- end
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- if response.status_code >= 300 && response.status_code != 404
- env.response.headers.delete("Transfer-Encoding")
- break
- end
-
- proxy_file(response, env)
- end
- rescue ex
- end
-end
-
-get "/Captcha" do |env|
- headers = HTTP::Headers{":authority" => "accounts.google.com"}
- response = YT_POOL.client &.get(env.request.resource, headers)
- env.response.headers["Content-Type"] = response.headers["Content-Type"]
- response.body
+before_all do |env|
+ Invidious::Routes::BeforeAll.handle(env)
end
-# Undocumented, creates anonymous playlist with specified 'video_ids', max 50 videos
-get "/watch_videos" do |env|
- response = YT_POOL.client &.get(env.request.resource)
- if url = response.headers["Location"]?
- url = URI.parse(url).request_target
- next env.redirect url
- end
-
- env.response.status_code = response.status_code
-end
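The removed /watch_videos route above proxies YouTube's undocumented endpoint of the same name and follows its Location header. A hypothetical request (placeholder video IDs) would be:

    GET /watch_videos?video_ids=VIDEOID0001,VIDEOID0002
      -> redirect to the request target of the Location header returned upstream
         (an anonymous playlist containing those videos, max 50)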
+Invidious::Routing.register_all
error 404 do |env|
- if md = env.request.path.match(/^\/(?<id>([a-zA-Z0-9_-]{11})|(\w+))$/)
- item = md["id"]
-
- # Check if item is branding URL e.g. https://youtube.com/gaming
- response = YT_POOL.client &.get("/#{item}")
-
- if response.status_code == 301
- response = YT_POOL.client &.get(URI.parse(response.headers["Location"]).request_target)
- end
-
- if response.body.empty?
- env.response.headers["Location"] = "/"
- halt env, status_code: 302
- end
-
- html = XML.parse_html(response.body)
- ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
-
- if ucid
- env.response.headers["Location"] = "/channel/#{ucid}"
- halt env, status_code: 302
- end
-
- params = [] of String
- env.params.query.each do |k, v|
- params << "#{k}=#{v}"
- end
- params = params.join("&")
-
- url = "/watch?v=#{item}"
- if !params.empty?
- url += "&#{params}"
- end
-
- # Check if item is video ID
- if item.match(/^[a-zA-Z0-9_-]{11}$/) && YT_POOL.client &.head("/watch?v=#{item}").status_code != 404
- env.response.headers["Location"] = url
- halt env, status_code: 302
- end
- end
-
- env.response.headers["Location"] = "/"
- halt env, status_code: 302
+ Invidious::Routes::ErrorRoutes.error_404(env)
end
error 500 do |env, ex|
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
error_template(500, ex)
end
-static_headers do |response, filepath, filestat|
+static_headers do |response|
response.headers.add("Cache-Control", "max-age=2629800")
end
+# Init Kemal
+
public_folder "assets"
Kemal.config.powered_by_header = false
@@ -3914,9 +236,16 @@ add_handler AuthHandler.new
add_handler DenyFrame.new
add_context_storage_type(Array(String))
add_context_storage_type(Preferences)
-add_context_storage_type(User)
+add_context_storage_type(Invidious::User)
Kemal.config.logger = LOGGER
+Kemal.config.app_name = "Invidious"
+
+# Enable Kemal's production mode.
+# Users can also set the KEMAL_ENV environment variable to configure this themselves.
+{% if flag?(:release) || flag?(:production) %}
+ Kemal.config.env = "production" if !ENV.has_key?("KEMAL_ENV")
+{% end %}
Kemal.run do |config|
if CONFIG.bind_unix
@@ -3925,7 +254,7 @@ Kemal.run do |config|
end
config.server.not_nil!.bind_unix CONFIG.bind_unix.not_nil!
else
- config.host_binding = config.host_binding != "0.0.0.0" ? config.host_binding : CONFIG.host_binding
- config.port = config.port != 3000 ? config.port : CONFIG.port
+ Kemal.config.host_binding = Kemal.config.host_binding != "0.0.0.0" ? Kemal.config.host_binding : CONFIG.host_binding
+ Kemal.config.port = Kemal.config.port != 3000 ? Kemal.config.port : CONFIG.port
end
end
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
deleted file mode 100644
index 3109b508..00000000
--- a/src/invidious/channels.cr
+++ /dev/null
@@ -1,984 +0,0 @@
-struct InvidiousChannel
- include DB::Serializable
-
- property id : String
- property author : String
- property updated : Time
- property deleted : Bool
- property subscribed : Time?
-end
-
-struct ChannelVideo
- include DB::Serializable
-
- property id : String
- property title : String
- property published : Time
- property updated : Time
- property ucid : String
- property author : String
- property length_seconds : Int32 = 0
- property live_now : Bool = false
- property premiere_timestamp : Time? = nil
- property views : Int64? = nil
-
- def to_json(locale, json : JSON::Builder)
- json.object do
- json.field "type", "shortVideo"
-
- json.field "title", self.title
- json.field "videoId", self.id
- json.field "videoThumbnails" do
- generate_thumbnails(json, self.id)
- end
-
- json.field "lengthSeconds", self.length_seconds
-
- json.field "author", self.author
- json.field "authorId", self.ucid
- json.field "authorUrl", "/channel/#{self.ucid}"
- json.field "published", self.published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
-
- json.field "viewCount", self.views
- end
- end
-
- def to_json(locale, json : JSON::Builder | Nil = nil)
- if json
- to_json(locale, json)
- else
- JSON.build do |json|
- to_json(locale, json)
- end
- end
- end
-
- def to_xml(locale, query_params, xml : XML::Builder)
- query_params["v"] = self.id
-
- xml.element("entry") do
- xml.element("id") { xml.text "yt:video:#{self.id}" }
- xml.element("yt:videoId") { xml.text self.id }
- xml.element("yt:channelId") { xml.text self.ucid }
- xml.element("title") { xml.text self.title }
- xml.element("link", rel: "alternate", href: "#{HOST_URL}/watch?#{query_params}")
-
- xml.element("author") do
- xml.element("name") { xml.text self.author }
- xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
- end
-
- xml.element("content", type: "xhtml") do
- xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
- xml.element("a", href: "#{HOST_URL}/watch?#{query_params}") do
- xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg")
- end
- end
- end
-
- xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
- xml.element("updated") { xml.text self.updated.to_s("%Y-%m-%dT%H:%M:%S%:z") }
-
- xml.element("media:group") do
- xml.element("media:title") { xml.text self.title }
- xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg",
- width: "320", height: "180")
- end
- end
- end
-
- def to_xml(locale, xml : XML::Builder | Nil = nil)
- if xml
- to_xml(locale, xml)
- else
- XML.build do |xml|
- to_xml(locale, xml)
- end
- end
- end
-
- def to_tuple
- {% begin %}
- {
- {{*@type.instance_vars.map { |var| var.name }}}
- }
- {% end %}
- end
-end
-
-struct AboutRelatedChannel
- include DB::Serializable
-
- property ucid : String
- property author : String
- property author_url : String
- property author_thumbnail : String
-end
-
-# TODO: Refactor into either SearchChannel or InvidiousChannel
-struct AboutChannel
- include DB::Serializable
-
- property ucid : String
- property author : String
- property auto_generated : Bool
- property author_url : String
- property author_thumbnail : String
- property banner : String?
- property description_html : String
- property paid : Bool
- property total_views : Int64
- property sub_count : Int32
- property joined : Time
- property is_family_friendly : Bool
- property allowed_regions : Array(String)
- property related_channels : Array(AboutRelatedChannel)
- property tabs : Array(String)
-end
-
-class ChannelRedirect < Exception
- property channel_id : String
-
- def initialize(@channel_id)
- end
-end
-
-def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
- finished_channel = Channel(String | Nil).new
-
- spawn do
- active_threads = 0
- active_channel = Channel(Nil).new
-
- channels.each do |ucid|
- if active_threads >= max_threads
- active_channel.receive
- active_threads -= 1
- end
-
- active_threads += 1
- spawn do
- begin
- get_channel(ucid, db, refresh, pull_all_videos)
- finished_channel.send(ucid)
- rescue ex
- finished_channel.send(nil)
- ensure
- active_channel.send(nil)
- end
- end
- end
- end
-
- final = [] of String
- channels.size.times do
- if ucid = finished_channel.receive
- final << ucid
- end
- end
-
- return final
-end
-
-def get_channel(id, db, refresh = true, pull_all_videos = true)
- if channel = db.query_one?("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
- if refresh && Time.utc - channel.updated > 10.minutes
- channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
- channel_array = channel.to_a
- args = arg_array(channel_array)
-
- db.exec("INSERT INTO channels VALUES (#{args}) \
- ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", args: channel_array)
- end
- else
- channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
- channel_array = channel.to_a
- args = arg_array(channel_array)
-
- db.exec("INSERT INTO channels VALUES (#{args})", args: channel_array)
- end
-
- return channel
-end
-
-def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
- LOGGER.debug("fetch_channel: #{ucid}")
- LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
-
- LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
- rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
- LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
- rss = XML.parse_html(rss)
-
- author = rss.xpath_node(%q(//feed/title))
- if !author
- raise InfoException.new("Deleted or invalid channel")
- end
- author = author.content
-
- # Auto-generated channels
- # https://support.google.com/youtube/answer/2579942
- if author.ends_with?(" - Topic") ||
- {"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
- auto_generated = true
- end
-
- LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
-
- page = 1
-
- LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
- response_body = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
-
- videos = [] of SearchVideo
- begin
- initial_data = JSON.parse(response_body)
- raise InfoException.new("Could not extract channel JSON") if !initial_data
-
- LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
- videos = extract_videos(initial_data.as_h, author, ucid)
- rescue ex
- if response_body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
- response_body.includes?("https://www.google.com/sorry/index")
- raise InfoException.new("Could not extract channel info. Instance is likely blocked.")
- end
- raise ex
- end
-
- LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
- rss.xpath_nodes("//feed/entry").each do |entry|
- video_id = entry.xpath_node("videoid").not_nil!.content
- title = entry.xpath_node("title").not_nil!.content
- published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
- updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
- author = entry.xpath_node("author/name").not_nil!.content
- ucid = entry.xpath_node("channelid").not_nil!.content
- views = entry.xpath_node("group/community/statistics").try &.["views"]?.try &.to_i64?
- views ||= 0_i64
-
- channel_video = videos.select { |video| video.id == video_id }[0]?
-
- length_seconds = channel_video.try &.length_seconds
- length_seconds ||= 0
-
- live_now = channel_video.try &.live_now
- live_now ||= false
-
- premiere_timestamp = channel_video.try &.premiere_timestamp
-
- video = ChannelVideo.new({
- id: video_id,
- title: title,
- published: published,
- updated: Time.utc,
- ucid: ucid,
- author: author,
- length_seconds: length_seconds,
- live_now: live_now,
- premiere_timestamp: premiere_timestamp,
- views: views,
- })
-
- LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
-
- # We don't include the 'premiere_timestamp' here because channel pages don't include them,
- # meaning the above timestamp is always null
- was_insert = db.query_one("INSERT INTO channel_videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \
- ON CONFLICT (id) DO UPDATE SET title = $2, published = $3, \
- updated = $4, ucid = $5, author = $6, length_seconds = $7, \
- live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
-
- if was_insert
- LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
- db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
- feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid)
- else
- LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
- end
- end
-
- if pull_all_videos
- page += 1
-
- ids = [] of String
-
- loop do
- response_body = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
- initial_data = JSON.parse(response_body)
- raise InfoException.new("Could not extract channel JSON") if !initial_data
- videos = extract_videos(initial_data.as_h, author, ucid)
-
- count = videos.size
- videos = videos.map { |video| ChannelVideo.new({
- id: video.id,
- title: video.title,
- published: video.published,
- updated: Time.utc,
- ucid: video.ucid,
- author: video.author,
- length_seconds: video.length_seconds,
- live_now: video.live_now,
- premiere_timestamp: video.premiere_timestamp,
- views: video.views,
- }) }
-
- videos.each do |video|
- ids << video.id
-
- # We are notified of Red videos elsewhere (PubSub), which includes a correct published date,
- # so since they don't provide a published date here we can safely ignore them.
- if Time.utc - video.published > 1.minute
- was_insert = db.query_one("INSERT INTO channel_videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \
- ON CONFLICT (id) DO UPDATE SET title = $2, published = $3, \
- updated = $4, ucid = $5, author = $6, length_seconds = $7, \
- live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
-
- db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
- feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid) if was_insert
- end
- end
-
- break if count < 25
- page += 1
- end
- end
-
- channel = InvidiousChannel.new({
- id: ucid,
- author: author,
- updated: Time.utc,
- deleted: false,
- subscribed: nil,
- })
-
- return channel
-end
-
-def fetch_channel_playlists(ucid, author, continuation, sort_by)
- if continuation
- response_json = request_youtube_api_browse(continuation)
- result = JSON.parse(response_json)
- continuationItems = result["onResponseReceivedActions"]?
- .try &.[0]["appendContinuationItemsAction"]["continuationItems"]
-
- return [] of SearchItem, nil if !continuationItems
-
- items = [] of SearchItem
- continuationItems.as_a.select(&.as_h.has_key?("gridPlaylistRenderer")).each { |item|
- extract_item(item, author, ucid).try { |t| items << t }
- }
-
- continuation = continuationItems.as_a.last["continuationItemRenderer"]?
- .try &.["continuationEndpoint"]["continuationCommand"]["token"].as_s
- else
- url = "/channel/#{ucid}/playlists?flow=list&view=1"
-
- case sort_by
- when "last", "last_added"
- #
- when "oldest", "oldest_created"
- url += "&sort=da"
- when "newest", "newest_created"
- url += "&sort=dd"
- else nil # Ignore
- end
-
- response = YT_POOL.client &.get(url)
- initial_data = extract_initial_data(response.body)
- return [] of SearchItem, nil if !initial_data
-
- items = extract_items(initial_data, author, ucid)
- continuation = response.body.match(/"token":"(?<continuation>[^"]+)"/).try &.["continuation"]?
- end
-
- return items, continuation
-end
-
-def produce_channel_videos_continuation(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
- object = {
- "80226972:embedded" => {
- "2:string" => ucid,
- "3:base64" => {
- "2:string" => "videos",
- "6:varint" => 2_i64,
- "7:varint" => 1_i64,
- "12:varint" => 1_i64,
- "13:string" => "",
- "23:varint" => 0_i64,
- },
- },
- }
-
- if !v2
- if auto_generated
- seed = Time.unix(1525757349)
- until seed >= Time.utc
- seed += 1.month
- end
- timestamp = seed - (page - 1).months
-
- object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0x36_i64
- object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = "#{timestamp.to_unix}"
- else
- object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0_i64
- object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = "#{page}"
- end
- else
- object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0_i64
-
- object["80226972:embedded"]["3:base64"].as(Hash)["61:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
- "1:string" => Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json({
- "1:varint" => 30_i64 * (page - 1),
- }))),
- })))
- end
-
- case sort_by
- when "newest"
- when "popular"
- object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 0x01_i64
- when "oldest"
- object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 0x02_i64
- else nil # Ignore
- end
-
- object["80226972:embedded"]["3:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json(object["80226972:embedded"]["3:base64"])))
- object["80226972:embedded"].delete("3:base64")
-
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return continuation
-end
-
-# Used in bypass_captcha_job.cr
-def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
- continuation = produce_channel_videos_continuation(ucid, page, auto_generated, sort_by, v2)
- return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
-end
-
-# ## NOTE: DEPRECATED
-# Reason: unstable.
-# The Protobuf object must be provided with the id of the last playlist on the current "page"
-# in order to fetch the next one accurately
-# (if the id isn't included, entries shift around erratically between pages,
-# leading to repeated and skipped entries).
-#
-# Since it's impossible to produce the appropriate Protobuf without an id provided by the user,
-# it's better to stick to the continuation tokens returned by the first request and onward.
-def produce_channel_playlists_url(ucid, cursor, sort = "newest", auto_generated = false)
- object = {
- "80226972:embedded" => {
- "2:string" => ucid,
- "3:base64" => {
- "2:string" => "playlists",
- "6:varint" => 2_i64,
- "7:varint" => 1_i64,
- "12:varint" => 1_i64,
- "13:string" => "",
- "23:varint" => 0_i64,
- },
- },
- }
-
- if cursor
- cursor = Base64.urlsafe_encode(cursor, false) if !auto_generated
- object["80226972:embedded"]["3:base64"].as(Hash)["15:string"] = cursor
- end
-
- if auto_generated
- object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 0x32_i64
- else
- object["80226972:embedded"]["3:base64"].as(Hash)["4:varint"] = 1_i64
- case sort
- when "oldest", "oldest_created"
- object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 2_i64
- when "newest", "newest_created"
- object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 3_i64
- when "last", "last_added"
- object["80226972:embedded"]["3:base64"].as(Hash)["3:varint"] = 4_i64
- else nil # Ignore
- end
- end
-
- object["80226972:embedded"]["3:string"] = Base64.urlsafe_encode(Protodec::Any.from_json(Protodec::Any.cast_json(object["80226972:embedded"]["3:base64"])))
- object["80226972:embedded"].delete("3:base64")
-
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
-end
-
-# TODO: Add "sort_by"
-def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
- response = YT_POOL.client &.get("/channel/#{ucid}/community?gl=US&hl=en")
- if response.status_code != 200
- response = YT_POOL.client &.get("/user/#{ucid}/community?gl=US&hl=en")
- end
-
- if response.status_code != 200
- raise InfoException.new("This channel does not exist.")
- end
-
- ucid = response.body.match(/https:\/\/www.youtube.com\/channel\/(?<ucid>UC[a-zA-Z0-9_-]{22})/).not_nil!["ucid"]
-
- if !continuation || continuation.empty?
- initial_data = extract_initial_data(response.body)
- body = initial_data["contents"]?.try &.["twoColumnBrowseResultsRenderer"]["tabs"].as_a.select { |tab| tab["tabRenderer"]?.try &.["selected"].as_bool.== true }[0]?
-
- if !body
- raise InfoException.new("Could not extract community tab.")
- end
-
- body = body["tabRenderer"]["content"]["sectionListRenderer"]["contents"][0]["itemSectionRenderer"]
- else
- continuation = produce_channel_community_continuation(ucid, continuation)
-
- headers = HTTP::Headers.new
- headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
-
- session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""
- post_req = {
- session_token: session_token,
- }
-
- response = YT_POOL.client &.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, form: post_req)
- body = JSON.parse(response.body)
-
- body = body["response"]["continuationContents"]["itemSectionContinuation"]? ||
- body["response"]["continuationContents"]["backstageCommentsContinuation"]?
-
- if !body
- raise InfoException.new("Could not extract continuation.")
- end
- end
-
- continuation = body["continuations"]?.try &.[0]["nextContinuationData"]["continuation"].as_s
- posts = body["contents"].as_a
-
- if message = posts[0]["messageRenderer"]?
- error_message = (message["text"]["simpleText"]? ||
- message["text"]["runs"]?.try &.[0]?.try &.["text"]?)
- .try &.as_s || ""
- raise InfoException.new(error_message)
- end
-
- response = JSON.build do |json|
- json.object do
- json.field "authorId", ucid
- json.field "comments" do
- json.array do
- posts.each do |post|
- comments = post["backstagePostThreadRenderer"]?.try &.["comments"]? ||
- post["backstageCommentsContinuation"]?
-
- post = post["backstagePostThreadRenderer"]?.try &.["post"]["backstagePostRenderer"]? ||
- post["commentThreadRenderer"]?.try &.["comment"]["commentRenderer"]?
-
- next if !post
-
- content_html = post["contentText"]?.try { |t| parse_content(t) } || ""
- author = post["authorText"]?.try &.["simpleText"]? || ""
-
- json.object do
- json.field "author", author
- json.field "authorThumbnails" do
- json.array do
- qualities = {32, 48, 76, 100, 176, 512}
- author_thumbnail = post["authorThumbnail"]["thumbnails"].as_a[0]["url"].as_s
-
- qualities.each do |quality|
- json.object do
- json.field "url", author_thumbnail.gsub(/s\d+-/, "s#{quality}-")
- json.field "width", quality
- json.field "height", quality
- end
- end
- end
- end
-
- if post["authorEndpoint"]?
- json.field "authorId", post["authorEndpoint"]["browseEndpoint"]["browseId"]
- json.field "authorUrl", post["authorEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
- else
- json.field "authorId", ""
- json.field "authorUrl", ""
- end
-
- published_text = post["publishedTimeText"]["runs"][0]["text"].as_s
- published = decode_date(published_text.rchop(" (edited)"))
-
- if published_text.includes?(" (edited)")
- json.field "isEdited", true
- else
- json.field "isEdited", false
- end
-
- like_count = post["actionButtons"]["commentActionButtonsRenderer"]["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"]
- .try &.as_s.gsub(/\D/, "").to_i? || 0
-
- json.field "content", html_to_content(content_html)
- json.field "contentHtml", content_html
-
- json.field "published", published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
-
- json.field "likeCount", like_count
- json.field "commentId", post["postId"]? || post["commentId"]? || ""
- json.field "authorIsChannelOwner", post["authorEndpoint"]["browseEndpoint"]["browseId"] == ucid
-
- if attachment = post["backstageAttachment"]?
- json.field "attachment" do
- json.object do
- case attachment.as_h
- when .has_key?("videoRenderer")
- attachment = attachment["videoRenderer"]
- json.field "type", "video"
-
- if !attachment["videoId"]?
- error_message = (attachment["title"]["simpleText"]? ||
- attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?)
-
- json.field "error", error_message
- else
- video_id = attachment["videoId"].as_s
-
- video_title = attachment["title"]["simpleText"]? || attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?
- json.field "title", video_title
- json.field "videoId", video_id
- json.field "videoThumbnails" do
- generate_thumbnails(json, video_id)
- end
-
- json.field "lengthSeconds", decode_length_seconds(attachment["lengthText"]["simpleText"].as_s)
-
- author_info = attachment["ownerText"]["runs"][0].as_h
-
- json.field "author", author_info["text"].as_s
- json.field "authorId", author_info["navigationEndpoint"]["browseEndpoint"]["browseId"]
- json.field "authorUrl", author_info["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
-
- # TODO: json.field "authorThumbnails", "channelThumbnailSupportedRenderers"
- # TODO: json.field "authorVerified", "ownerBadges"
-
- published = decode_date(attachment["publishedTimeText"]["simpleText"].as_s)
-
- json.field "published", published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
-
- view_count = attachment["viewCountText"]?.try &.["simpleText"].as_s.gsub(/\D/, "").to_i64? || 0_i64
-
- json.field "viewCount", view_count
- json.field "viewCountText", translate(locale, "`x` views", number_to_short_text(view_count))
- end
- when .has_key?("backstageImageRenderer")
- attachment = attachment["backstageImageRenderer"]
- json.field "type", "image"
-
- json.field "imageThumbnails" do
- json.array do
- thumbnail = attachment["image"]["thumbnails"][0].as_h
- width = thumbnail["width"].as_i
- height = thumbnail["height"].as_i
- aspect_ratio = (width.to_f / height.to_f)
- url = thumbnail["url"].as_s.gsub(/=w\d+-h\d+(-p)?(-nd)?(-df)?(-rwa)?/, "=s640")
-
- qualities = {320, 560, 640, 1280, 2000}
-
- qualities.each do |quality|
- json.object do
- json.field "url", url.gsub(/=s\d+/, "=s#{quality}")
- json.field "width", quality
- json.field "height", (quality / aspect_ratio).ceil.to_i
- end
- end
- end
- end
- # TODO
- # when .has_key?("pollRenderer")
- # attachment = attachment["pollRenderer"]
- # json.field "type", "poll"
- else
- json.field "type", "unknown"
- json.field "error", "Unrecognized attachment type."
- end
- end
- end
- end
-
- if comments && (reply_count = (comments["backstageCommentsRenderer"]["moreText"]["simpleText"]? ||
- comments["backstageCommentsRenderer"]["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
- .try &.as_s.gsub(/\D/, "").to_i?)
- continuation = comments["backstageCommentsRenderer"]["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
- continuation ||= ""
-
- json.field "replies" do
- json.object do
- json.field "replyCount", reply_count
- json.field "continuation", extract_channel_community_cursor(continuation)
- end
- end
- end
- end
- end
- end
- end
-
- if body["continuations"]?
- continuation = body["continuations"][0]["nextContinuationData"]["continuation"].as_s
- json.field "continuation", extract_channel_community_cursor(continuation)
- end
- end
- end
-
- if format == "html"
- response = JSON.parse(response)
- content_html = template_youtube_comments(response, locale, thin_mode)
-
- response = JSON.build do |json|
- json.object do
- json.field "contentHtml", content_html
- end
- end
- end
-
- return response
-end
-
-def produce_channel_community_continuation(ucid, cursor)
- object = {
- "80226972:embedded" => {
- "2:string" => ucid,
- "3:string" => cursor || "",
- },
- }
-
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return continuation
-end
-
-def extract_channel_community_cursor(continuation)
- object = URI.decode_www_form(continuation)
- .try { |i| Base64.decode(i) }
- .try { |i| IO::Memory.new(i) }
- .try { |i| Protodec::Any.parse(i) }
- .try { |i| i["80226972:0:embedded"]["3:1:base64"].as_h }
-
- if object["53:2:embedded"]?.try &.["3:0:embedded"]?
- object["53:2:embedded"]["3:0:embedded"]["2:0:string"] = object["53:2:embedded"]["3:0:embedded"]
- .try { |i| i["2:0:base64"].as_h }
- .try { |i| Protodec::Any.cast_json(i) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i, padding: false) }
-
- object["53:2:embedded"]["3:0:embedded"].as_h.delete("2:0:base64")
- end
-
- cursor = Protodec::Any.cast_json(object)
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
-
- cursor
-end
-
-def get_about_info(ucid, locale)
- result = YT_POOL.client &.get("/channel/#{ucid}/about?gl=US&hl=en")
- if result.status_code != 200
- result = YT_POOL.client &.get("/user/#{ucid}/about?gl=US&hl=en")
- end
-
- if md = result.headers["location"]?.try &.match(/\/channel\/(?<ucid>UC[a-zA-Z0-9_-]{22})/)
- raise ChannelRedirect.new(channel_id: md["ucid"])
- end
-
- if result.status_code != 200
- raise InfoException.new("This channel does not exist.")
- end
-
- about = XML.parse_html(result.body)
- if about.xpath_node(%q(//div[contains(@class, "channel-empty-message")]))
- raise InfoException.new("This channel does not exist.")
- end
-
- initdata = extract_initial_data(result.body)
- if initdata.empty?
- error_message = about.xpath_node(%q(//div[@class="yt-alert-content"])).try &.content.strip
- error_message ||= translate(locale, "Could not get channel info.")
- raise InfoException.new(error_message)
- end
-
- if browse_endpoint = initdata["onResponseReceivedActions"]?.try &.[0]?.try &.["navigateAction"]?.try &.["endpoint"]?.try &.["browseEndpoint"]?
- raise ChannelRedirect.new(channel_id: browse_endpoint["browseId"].to_s)
- end
-
- auto_generated = false
- # Check for special auto generated gaming channels
- if !initdata.has_key?("metadata")
- auto_generated = true
- end
-
- if auto_generated
- author = initdata["header"]["interactiveTabbedHeaderRenderer"]["title"]["simpleText"].as_s
- author_url = initdata["microformat"]["microformatDataRenderer"]["urlCanonical"].as_s
- author_thumbnail = initdata["header"]["interactiveTabbedHeaderRenderer"]["boxArt"]["thumbnails"][0]["url"].as_s
-
- # Raises a KeyError on failure.
- banners = initdata["header"]["interactiveTabbedHeaderRenderer"]?.try &.["banner"]?.try &.["thumbnails"]?
- banner = banners.try &.[-1]?.try &.["url"].as_s?
-
- description = initdata["header"]["interactiveTabbedHeaderRenderer"]["description"]["simpleText"].as_s
- description_html = HTML.escape(description).gsub("\n", "<br>")
-
- paid = false
- is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
- allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map { |a| a.as_s }
-
- related_channels = [] of AboutRelatedChannel
- else
- author = initdata["metadata"]["channelMetadataRenderer"]["title"].as_s
- author_url = initdata["metadata"]["channelMetadataRenderer"]["channelUrl"].as_s
- author_thumbnail = initdata["metadata"]["channelMetadataRenderer"]["avatar"]["thumbnails"][0]["url"].as_s
-
- ucid = initdata["metadata"]["channelMetadataRenderer"]["externalId"].as_s
-
- # Raises a KeyError on failure.
- banners = initdata["header"]["c4TabbedHeaderRenderer"]?.try &.["banner"]?.try &.["thumbnails"]?
- banner = banners.try &.[-1]?.try &.["url"].as_s?
-
- # if banner.includes? "channels/c4/default_banner"
- # banner = nil
- # end
-
- description = initdata["metadata"]["channelMetadataRenderer"]?.try &.["description"]?.try &.as_s? || ""
- description_html = HTML.escape(description).gsub("\n", "<br>")
-
- paid = about.xpath_node(%q(//meta[@itemprop="paid"])).not_nil!["content"] == "True"
- is_family_friendly = about.xpath_node(%q(//meta[@itemprop="isFamilyFriendly"])).not_nil!["content"] == "True"
- allowed_regions = about.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).not_nil!["content"].split(",")
-
- related_channels = initdata["contents"]["twoColumnBrowseResultsRenderer"]
- .["secondaryContents"]?.try &.["browseSecondaryContentsRenderer"]["contents"][0]?
- .try &.["verticalChannelSectionRenderer"]?.try &.["items"]?.try &.as_a.map do |node|
- renderer = node["miniChannelRenderer"]?
- related_id = renderer.try &.["channelId"]?.try &.as_s?
- related_id ||= ""
-
- related_title = renderer.try &.["title"]?.try &.["simpleText"]?.try &.as_s?
- related_title ||= ""
-
- related_author_url = renderer.try &.["navigationEndpoint"]?.try &.["commandMetadata"]?.try &.["webCommandMetadata"]?
- .try &.["url"]?.try &.as_s?
- related_author_url ||= ""
-
- related_author_thumbnails = renderer.try &.["thumbnail"]?.try &.["thumbnails"]?.try &.as_a?
- related_author_thumbnails ||= [] of JSON::Any
-
- related_author_thumbnail = ""
- if related_author_thumbnails.size > 0
- related_author_thumbnail = related_author_thumbnails[-1]["url"]?.try &.as_s?
- related_author_thumbnail ||= ""
- end
-
- AboutRelatedChannel.new({
- ucid: related_id,
- author: related_title,
- author_url: related_author_url,
- author_thumbnail: related_author_thumbnail,
- })
- end
- related_channels ||= [] of AboutRelatedChannel
- end
-
- total_views = 0_i64
- joined = Time.unix(0)
- tabs = [] of String
-
- tabs_json = initdata["contents"]["twoColumnBrowseResultsRenderer"]["tabs"]?.try &.as_a?
- if !tabs_json.nil?
- # Retrieve information from the tabs array. The index we are looking for varies between channels.
- tabs_json.each do |node|
- # Try to find the about section which is located in only one of the tabs.
- channel_about_meta = node["tabRenderer"]?.try &.["content"]?.try &.["sectionListRenderer"]?
- .try &.["contents"]?.try &.[0]?.try &.["itemSectionRenderer"]?.try &.["contents"]?
- .try &.[0]?.try &.["channelAboutFullMetadataRenderer"]?
-
- if !channel_about_meta.nil?
- total_views = channel_about_meta["viewCountText"]?.try &.["simpleText"]?.try &.as_s.gsub(/\D/, "").to_i64? || 0_i64
-
- # The joined text is split to several sub strings. The reduce joins those strings before parsing the date.
- joined = channel_about_meta["joinedDateText"]?.try &.["runs"]?.try &.as_a.reduce("") { |acc, node| acc + node["text"].as_s }
- .try { |text| Time.parse(text, "Joined %b %-d, %Y", Time::Location.local) } || Time.unix(0)
-
- # Normal Auto-generated channels
- # https://support.google.com/youtube/answer/2579942
- # For auto-generated channels, channel_about_meta only has ["description"]["simpleText"] and ["primaryLinks"][0]["title"]["simpleText"]
- if (channel_about_meta["primaryLinks"]?.try &.size || 0) == 1 && (channel_about_meta["primaryLinks"][0]?) &&
- (channel_about_meta["primaryLinks"][0]["title"]?.try &.["simpleText"]?.try &.as_s? || "") == "Auto-generated by YouTube"
- auto_generated = true
- end
- end
- end
- tabs = tabs_json.reject { |node| node["tabRenderer"]?.nil? }.map { |node| node["tabRenderer"]["title"].as_s.downcase }
- end
-
- sub_count = initdata["header"]["c4TabbedHeaderRenderer"]?.try &.["subscriberCountText"]?.try &.["simpleText"]?.try &.as_s?
- .try { |text| short_text_to_number(text.split(" ")[0]) } || 0
-
- AboutChannel.new({
- ucid: ucid,
- author: author,
- auto_generated: auto_generated,
- author_url: author_url,
- author_thumbnail: author_thumbnail,
- banner: banner,
- description_html: description_html,
- paid: paid,
- total_views: total_views,
- sub_count: sub_count,
- joined: joined,
- is_family_friendly: is_family_friendly,
- allowed_regions: allowed_regions,
- related_channels: related_channels,
- tabs: tabs,
- })
-end
-
-def get_channel_videos_response(ucid, page = 1, auto_generated = nil, sort_by = "newest")
- continuation = produce_channel_videos_continuation(ucid, page,
- auto_generated: auto_generated, sort_by: sort_by, v2: true)
-
- return request_youtube_api_browse(continuation)
-end
-
-def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
- videos = [] of SearchVideo
-
- 2.times do |i|
- response_json = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
- initial_data = JSON.parse(response_json)
- break if !initial_data
- videos.concat extract_videos(initial_data.as_h, author, ucid)
- end
-
- return videos.size, videos
-end
-
-def get_latest_videos(ucid)
- response_json = get_channel_videos_response(ucid)
- initial_data = JSON.parse(response_json)
- return [] of SearchVideo if !initial_data
- author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
- items = extract_videos(initial_data.as_h, author, ucid)
-
- return items
-end
diff --git a/src/invidious/channels/about.cr b/src/invidious/channels/about.cr
new file mode 100644
index 00000000..13909527
--- /dev/null
+++ b/src/invidious/channels/about.cr
@@ -0,0 +1,206 @@
+# TODO: Refactor into either SearchChannel or InvidiousChannel
+record AboutChannel,
+ ucid : String,
+ author : String,
+ auto_generated : Bool,
+ author_url : String,
+ author_thumbnail : String,
+ banner : String?,
+ description : String,
+ description_html : String,
+ total_views : Int64,
+ sub_count : Int32,
+ joined : Time,
+ is_family_friendly : Bool,
+ allowed_regions : Array(String),
+ tabs : Array(String),
+ tags : Array(String),
+ verified : Bool,
+ is_age_gated : Bool
+
+def get_about_info(ucid, locale) : AboutChannel
+ begin
+ # Fetch channel information from channel home page
+ initdata = YoutubeAPI.browse(browse_id: ucid, params: "")
+ rescue
+ raise InfoException.new("Could not get channel info.")
+ end
+
+ if initdata.dig?("alerts", 0, "alertRenderer", "type") == "ERROR"
+ error_message = initdata["alerts"][0]["alertRenderer"]["text"]["simpleText"].as_s
+ if error_message == "This channel does not exist."
+ raise NotFoundException.new(error_message)
+ else
+ raise InfoException.new(error_message)
+ end
+ end
+
+ if browse_endpoint = initdata["onResponseReceivedActions"]?.try &.[0]?.try &.["navigateAction"]?.try &.["endpoint"]?.try &.["browseEndpoint"]?
+ raise ChannelRedirect.new(channel_id: browse_endpoint["browseId"].to_s)
+ end
+
+ auto_generated = false
+ # Check for special auto generated gaming channels
+ if !initdata.has_key?("metadata")
+ auto_generated = true
+ end
+
+ tags = [] of String
+ tab_names = [] of String
+ total_views = 0_i64
+ joined = Time.unix(0)
+
+ if age_gate_renderer = initdata.dig?("contents", "twoColumnBrowseResultsRenderer", "tabs", 0, "tabRenderer", "content", "sectionListRenderer", "contents", 0, "channelAgeGateRenderer")
+ description_node = nil
+ author = age_gate_renderer["channelTitle"].as_s
+ ucid = initdata.dig("responseContext", "serviceTrackingParams", 0, "params", 0, "value").as_s
+ author_url = "https://www.youtube.com/channel/#{ucid}"
+ author_thumbnail = age_gate_renderer.dig("avatar", "thumbnails", 0, "url").as_s
+ banner = nil
+ is_family_friendly = false
+ is_age_gated = true
+ tab_names = ["videos", "shorts", "streams"]
+ auto_generated = false
+ else
+ if auto_generated
+ author = initdata["header"]["interactiveTabbedHeaderRenderer"]["title"]["simpleText"].as_s
+ author_url = initdata["microformat"]["microformatDataRenderer"]["urlCanonical"].as_s
+ author_thumbnail = initdata["header"]["interactiveTabbedHeaderRenderer"]["boxArt"]["thumbnails"][0]["url"].as_s
+
+ # Raises a KeyError on failure.
+ banners = initdata["header"]["interactiveTabbedHeaderRenderer"]?.try &.["banner"]?.try &.["thumbnails"]?
+ banner = banners.try &.[-1]?.try &.["url"].as_s?
+
+ description_base_node = initdata["header"]["interactiveTabbedHeaderRenderer"]["description"]
+ # some channels have the description in a simpleText
+ # ex: https://www.youtube.com/channel/UCQvWX73GQygcwXOTSf_VDVg/
+ description_node = description_base_node.dig?("simpleText") || description_base_node
+
+ tags = initdata.dig?("header", "interactiveTabbedHeaderRenderer", "badges")
+ .try &.as_a.map(&.["metadataBadgeRenderer"]["label"].as_s) || [] of String
+ else
+ author = initdata["metadata"]["channelMetadataRenderer"]["title"].as_s
+ author_url = initdata["metadata"]["channelMetadataRenderer"]["channelUrl"].as_s
+ author_thumbnail = initdata["metadata"]["channelMetadataRenderer"]["avatar"]["thumbnails"][0]["url"].as_s
+ author_verified = has_verified_badge?(initdata.dig?("header", "c4TabbedHeaderRenderer", "badges"))
+
+ ucid = initdata["metadata"]["channelMetadataRenderer"]["externalId"].as_s
+
+ # Raises a KeyError on failure.
+ banners = initdata["header"]["c4TabbedHeaderRenderer"]?.try &.["banner"]?.try &.["thumbnails"]?
+ banners ||= initdata.dig?("header", "pageHeaderRenderer", "content", "pageHeaderViewModel", "banner", "imageBannerViewModel", "image", "sources")
+ banner = banners.try &.[-1]?.try &.["url"].as_s?
+
+ # if banner.includes? "channels/c4/default_banner"
+ # banner = nil
+ # end
+
+ description_node = initdata["metadata"]["channelMetadataRenderer"]?.try &.["description"]?
+ tags = initdata.dig?("microformat", "microformatDataRenderer", "tags").try &.as_a.map(&.as_s) || [] of String
+ end
+
+ is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
+ if tabs_json = initdata["contents"]["twoColumnBrowseResultsRenderer"]["tabs"]?
+ # Get the name of the tabs available on this channel
+ tab_names = tabs_json.as_a.compact_map do |entry|
+ name = entry.dig?("tabRenderer", "title").try &.as_s.downcase
+
+ # This is a small fix to avoid extra code on the HTML side:
+ # the URL for the "live" tab is .../streams, so use "streams"
+ # everywhere for the sake of simplicity
+ (name == "live") ? "streams" : name
+ end
+
+ # Get the currently active tab ("About")
+ about_tab = extract_selected_tab(tabs_json)
+
+ # Try to find the about metadata section
+ channel_about_meta = about_tab.dig?(
+ "content",
+ "sectionListRenderer", "contents", 0,
+ "itemSectionRenderer", "contents", 0,
+ "channelAboutFullMetadataRenderer"
+ )
+
+ if !channel_about_meta.nil?
+ total_views = channel_about_meta.dig?("viewCountText", "simpleText").try &.as_s.gsub(/\D/, "").to_i64? || 0_i64
+
+ # The joined date text is split into several substrings; extract_text joins them before the date is parsed.
+ joined = extract_text(channel_about_meta["joinedDateText"]?)
+ .try { |text| Time.parse(text, "Joined %b %-d, %Y", Time::Location.local) } || Time.unix(0)
+
+ # Normal Auto-generated channels
+ # https://support.google.com/youtube/answer/2579942
+ # For auto-generated channels, channel_about_meta only has
+ # ["description"]["simpleText"] and ["primaryLinks"][0]["title"]["simpleText"]
+ auto_generated = (
+ (channel_about_meta["primaryLinks"]?.try &.size) == 1 && \
+ extract_text(channel_about_meta.dig?("primaryLinks", 0, "title")) == "Auto-generated by YouTube" ||
+ channel_about_meta.dig?("links", 0, "channelExternalLinkViewModel", "title", "content").try &.as_s == "Auto-generated by YouTube"
+ )
+ end
+ end
+ end
+
+ allowed_regions = initdata
+ .dig?("microformat", "microformatDataRenderer", "availableCountries")
+ .try &.as_a.map(&.as_s) || [] of String
+
+ description = !description_node.nil? ? description_node.as_s : ""
+ description_html = HTML.escape(description)
+
+ if !description_node.nil?
+ if description_node.as_h?.nil?
+ description_node = text_to_parsed_content(description_node.as_s)
+ end
+ description_html = parse_content(description_node)
+ if description_html == "" && description != ""
+ description_html = HTML.escape(description)
+ end
+ end
+
+ sub_count = 0
+
+ if (metadata_rows = initdata.dig?("header", "pageHeaderRenderer", "content", "pageHeaderViewModel", "metadata", "contentMetadataViewModel", "metadataRows").try &.as_a)
+ metadata_rows.each do |row|
+ metadata_part = row.dig?("metadataParts").try &.as_a.find { |i| i.dig?("text", "content").try &.as_s.includes?("subscribers") }
+ if !metadata_part.nil?
+ sub_count = short_text_to_number(metadata_part.dig("text", "content").as_s.split(" ")[0]).to_i32
+ end
+ break if sub_count != 0
+ end
+ end
+
+ AboutChannel.new(
+ ucid: ucid,
+ author: author,
+ auto_generated: auto_generated,
+ author_url: author_url,
+ author_thumbnail: author_thumbnail,
+ banner: banner,
+ description: description,
+ description_html: description_html,
+ total_views: total_views,
+ sub_count: sub_count,
+ joined: joined,
+ is_family_friendly: is_family_friendly,
+ allowed_regions: allowed_regions,
+ tabs: tab_names,
+ tags: tags,
+ verified: author_verified || false,
+ is_age_gated: is_age_gated || false,
+ )
+end
+
+def fetch_related_channels(about_channel : AboutChannel, continuation : String? = nil) : {Array(SearchChannel), String?}
+ if continuation.nil?
+ # params is {"2:string":"channels"} encoded
+ initial_data = YoutubeAPI.browse(browse_id: about_channel.ucid, params: "EghjaGFubmVscw%3D%3D")
+ else
+ initial_data = YoutubeAPI.browse(continuation)
+ end
+
+ items, continuation = extract_items(initial_data)
+
+ return items.select(SearchChannel), continuation
+end
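As the comment in fetch_related_channels notes, the hard-coded params value is just the protobuf object {"2:string": "channels"} in URL-safe base64. A minimal sketch of how such a blob could be reproduced inside this codebase, reusing the same Protodec pipeline the continuation helpers elsewhere in this diff already use (illustrative only, not part of the patch):

    # Encode {"2:string" => "channels"} the same way the continuation helpers do:
    # Protodec JSON cast -> protobuf bytes -> URL-safe Base64 -> URL-encode.
    params = {"2:string" => "channels"}
    encoded = URI.encode_www_form(
      Base64.urlsafe_encode(
        Protodec::Any.from_json(
          Protodec::Any.cast_json(params)
        )
      )
    )
    # encoded # => "EghjaGFubmVscw%3D%3D" (field 2, wire type 2, value "channels")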
diff --git a/src/invidious/channels/channels.cr b/src/invidious/channels/channels.cr
new file mode 100644
index 00000000..1478c8fc
--- /dev/null
+++ b/src/invidious/channels/channels.cr
@@ -0,0 +1,304 @@
+struct InvidiousChannel
+ include DB::Serializable
+
+ property id : String
+ property author : String
+ property updated : Time
+ property deleted : Bool
+ property subscribed : Time?
+end
+
+struct ChannelVideo
+ include DB::Serializable
+
+ property id : String
+ property title : String
+ property published : Time
+ property updated : Time
+ property ucid : String
+ property author : String
+ property length_seconds : Int32 = 0
+ property live_now : Bool = false
+ property premiere_timestamp : Time? = nil
+ property views : Int64? = nil
+
+ def to_json(locale, json : JSON::Builder)
+ json.object do
+ json.field "type", "shortVideo"
+
+ json.field "title", self.title
+ json.field "videoId", self.id
+ json.field "videoThumbnails" do
+ Invidious::JSONify::APIv1.thumbnails(json, self.id)
+ end
+
+ json.field "lengthSeconds", self.length_seconds
+
+ json.field "author", self.author
+ json.field "authorId", self.ucid
+ json.field "authorUrl", "/channel/#{self.ucid}"
+ json.field "published", self.published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
+
+ json.field "viewCount", self.views
+ end
+ end
+
+ def to_json(locale, _json : Nil = nil)
+ JSON.build do |json|
+ to_json(locale, json)
+ end
+ end
+
+ def to_xml(locale, query_params, xml : XML::Builder)
+ query_params["v"] = self.id
+
+ xml.element("entry") do
+ xml.element("id") { xml.text "yt:video:#{self.id}" }
+ xml.element("yt:videoId") { xml.text self.id }
+ xml.element("yt:channelId") { xml.text self.ucid }
+ xml.element("title") { xml.text self.title }
+ xml.element("link", rel: "alternate", href: "#{HOST_URL}/watch?#{query_params}")
+
+ xml.element("author") do
+ xml.element("name") { xml.text self.author }
+ xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
+ end
+
+ xml.element("content", type: "xhtml") do
+ xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
+ xml.element("a", href: "#{HOST_URL}/watch?#{query_params}") do
+ xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg")
+ end
+ end
+ end
+
+ xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
+ xml.element("updated") { xml.text self.updated.to_s("%Y-%m-%dT%H:%M:%S%:z") }
+
+ xml.element("media:group") do
+ xml.element("media:title") { xml.text self.title }
+ xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg",
+ width: "320", height: "180")
+ end
+ end
+ end
+
+ def to_xml(locale, _xml : Nil = nil)
+ XML.build do |xml|
+ to_xml(locale, xml)
+ end
+ end
+
+ def to_tuple
+ {% begin %}
+ {
+ {{@type.instance_vars.map(&.name).splat}}
+ }
+ {% end %}
+ end
+end
+
+class ChannelRedirect < Exception
+ property channel_id : String
+
+ def initialize(@channel_id)
+ end
+end
+
+def get_batch_channels(channels)
+ finished_channel = Channel(String | Nil).new
+ max_threads = 10
+
+ spawn do
+ active_threads = 0
+ active_channel = Channel(Nil).new
+
+ channels.each do |ucid|
+ if active_threads >= max_threads
+ active_channel.receive
+ active_threads -= 1
+ end
+
+ active_threads += 1
+ spawn do
+ begin
+ get_channel(ucid)
+ finished_channel.send(ucid)
+ rescue ex
+ finished_channel.send(nil)
+ ensure
+ active_channel.send(nil)
+ end
+ end
+ end
+ end
+
+ final = [] of String
+ channels.size.times do
+ if ucid = finished_channel.receive
+ final << ucid
+ end
+ end
+
+ return final
+end
+
+def get_channel(id) : InvidiousChannel
+ channel = Invidious::Database::Channels.select(id)
+
+ if channel.nil? || (Time.utc - channel.updated) > 2.days
+ channel = fetch_channel(id, pull_all_videos: false)
+ Invidious::Database::Channels.insert(channel, update_on_conflict: true)
+ end
+
+ return channel
+end
+
+def fetch_channel(ucid, pull_all_videos : Bool)
+ LOGGER.debug("fetch_channel: #{ucid}")
+ LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}")
+
+ namespaces = {
+ "yt" => "http://www.youtube.com/xml/schemas/2015",
+ "media" => "http://search.yahoo.com/mrss/",
+ "default" => "http://www.w3.org/2005/Atom",
+ }
+
+ LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
+ rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
+ LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
+ rss = XML.parse(rss)
+
+ author = rss.xpath_node("//default:feed/default:title", namespaces)
+ if !author
+ raise InfoException.new("Deleted or invalid channel")
+ end
+
+ author = author.content
+
+ # Auto-generated channels
+ # https://support.google.com/youtube/answer/2579942
+ if author.ends_with?(" - Topic") ||
+ {"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? author
+ auto_generated = true
+ end
+
+ LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+
+ channel = InvidiousChannel.new({
+ id: ucid,
+ author: author,
+ updated: Time.utc,
+ deleted: false,
+ subscribed: nil,
+ })
+
+ LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
+ videos, continuation = IV::Channel::Tabs.get_videos(channel)
+
+ LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
+ rss.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry|
+ video_id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content
+ title = entry.xpath_node("default:title", namespaces).not_nil!.content
+
+ published = Time.parse_rfc3339(
+ entry.xpath_node("default:published", namespaces).not_nil!.content
+ )
+ updated = Time.parse_rfc3339(
+ entry.xpath_node("default:updated", namespaces).not_nil!.content
+ )
+
+ author = entry.xpath_node("default:author/default:name", namespaces).not_nil!.content
+ ucid = entry.xpath_node("yt:channelId", namespaces).not_nil!.content
+
+ views = entry
+ .xpath_node("media:group/media:community/media:statistics", namespaces)
+ .try &.["views"]?.try &.to_i64? || 0_i64
+
+ channel_video = videos
+ .select(SearchVideo)
+ .select(&.id.== video_id)[0]?
+
+ length_seconds = channel_video.try &.length_seconds
+ length_seconds ||= 0
+
+ live_now = channel_video.try &.badges.live_now?
+ live_now ||= false
+
+ premiere_timestamp = channel_video.try &.premiere_timestamp
+
+ video = ChannelVideo.new({
+ id: video_id,
+ title: title,
+ published: published,
+ updated: updated,
+ ucid: ucid,
+ author: author,
+ length_seconds: length_seconds,
+ live_now: live_now,
+ premiere_timestamp: premiere_timestamp,
+ views: views,
+ })
+
+ LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+
+    # We don't include the 'premiere_timestamp' here because channel pages don't
+    # include it, meaning the timestamp above is always nil.
+ was_insert = Invidious::Database::ChannelVideos.insert(video)
+
+ if was_insert
+ LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+ if CONFIG.enable_user_notifications
+ Invidious::Database::Users.add_notification(video)
+ else
+ Invidious::Database::Users.feed_needs_update(video)
+ end
+ else
+ LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+ end
+ end
+
+ if pull_all_videos
+ loop do
+ # Keep fetching videos using the continuation token retrieved earlier
+ videos, continuation = IV::Channel::Tabs.get_videos(channel, continuation: continuation)
+
+ count = 0
+ videos.select(SearchVideo).each do |video|
+ count += 1
+ video = ChannelVideo.new({
+ id: video.id,
+ title: video.title,
+ published: video.published,
+ updated: Time.utc,
+ ucid: video.ucid,
+ author: video.author,
+ length_seconds: video.length_seconds,
+ live_now: video.badges.live_now?,
+ premiere_timestamp: video.premiere_timestamp,
+ views: video.views,
+ })
+
+        # We are notified of "Red" videos elsewhere (PubSub), which includes a correct
+        # published date, so we can safely ignore them here since no published date is provided.
+ if Time.utc - video.published > 1.minute
+ was_insert = Invidious::Database::ChannelVideos.insert(video)
+ if was_insert
+ if CONFIG.enable_user_notifications
+ Invidious::Database::Users.add_notification(video)
+ else
+ Invidious::Database::Users.feed_needs_update(video)
+ end
+ end
+ end
+ end
+
+ break if count < 25
+ sleep 500.milliseconds
+ end
+ end
+
+ channel.updated = Time.utc
+ return channel
+end
diff --git a/src/invidious/channels/community.cr b/src/invidious/channels/community.cr
new file mode 100644
index 00000000..49ffd990
--- /dev/null
+++ b/src/invidious/channels/community.cr
@@ -0,0 +1,332 @@
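+# Widths (in pixels) generated for image and poll attachments; the corresponding
+# heights are derived from each thumbnail's aspect ratio.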
+private IMAGE_QUALITIES = {320, 560, 640, 1280, 2000}
+
+# TODO: Add "sort_by"
+def fetch_channel_community(ucid, cursor, locale, format, thin_mode)
+ if cursor.nil?
+ # Egljb21tdW5pdHk%3D is the protobuf object to load "community"
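+    # (decoded: {"2:string": "community"})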
+ initial_data = YoutubeAPI.browse(ucid, params: "Egljb21tdW5pdHk%3D")
+
+ items = [] of JSON::Any
+ extract_items(initial_data) do |item|
+ items << item
+ end
+ else
+ continuation = produce_channel_community_continuation(ucid, cursor)
+ initial_data = YoutubeAPI.browse(continuation: continuation)
+
+ container = initial_data.dig?("continuationContents", "itemSectionContinuation", "contents")
+
+ raise InfoException.new("Can't extract community data") if container.nil?
+
+ items = container.as_a
+ end
+
+ return extract_channel_community(items, ucid: ucid, locale: locale, format: format, thin_mode: thin_mode)
+end
+
+def fetch_channel_community_post(ucid, post_id, locale, format, thin_mode)
+ object = {
+ "2:string" => "community",
+ "25:embedded" => {
+ "22:string" => post_id.to_s,
+ },
+ "45:embedded" => {
+ "2:varint" => 1_i64,
+ "3:varint" => 1_i64,
+ },
+ }
+ params = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ initial_data = YoutubeAPI.browse(ucid, params: params)
+
+ items = [] of JSON::Any
+ extract_items(initial_data) do |item|
+ items << item
+ end
+
+ return extract_channel_community(items, ucid: ucid, locale: locale, format: format, thin_mode: thin_mode, is_single_post: true)
+end
+
+def extract_channel_community(items, *, ucid, locale, format, thin_mode, is_single_post : Bool = false)
+ if message = items[0]["messageRenderer"]?
+ error_message = (message["text"]["simpleText"]? ||
+ message["text"]["runs"]?.try &.[0]?.try &.["text"]?)
+ .try &.as_s || ""
+ if error_message == "This channel does not exist."
+ raise NotFoundException.new(error_message)
+ else
+ raise InfoException.new(error_message)
+ end
+ end
+
+ response = JSON.build do |json|
+ json.object do
+ json.field "authorId", ucid
+ if is_single_post
+ json.field "singlePost", true
+ end
+ json.field "comments" do
+ json.array do
+ items.each do |post|
+ comments = post["backstagePostThreadRenderer"]?.try &.["comments"]? ||
+ post["backstageCommentsContinuation"]?
+
+ post = post["backstagePostThreadRenderer"]?.try &.["post"]["backstagePostRenderer"]? ||
+ post["commentThreadRenderer"]?.try &.["comment"]["commentRenderer"]?
+
+ next if !post
+
+ content_html = post["contentText"]?.try { |t| parse_content(t) } || ""
+ author = post["authorText"]["runs"]?.try &.[0]?.try &.["text"]? || ""
+
+ json.object do
+ json.field "author", author
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+ author_thumbnail = post["authorThumbnail"]["thumbnails"].as_a[0]["url"].as_s
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", author_thumbnail.gsub(/s\d+-/, "s#{quality}-")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+
+ if post["authorEndpoint"]?
+ json.field "authorId", post["authorEndpoint"]["browseEndpoint"]["browseId"]
+ json.field "authorUrl", post["authorEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
+ else
+ json.field "authorId", ""
+ json.field "authorUrl", ""
+ end
+
+ published_text = post["publishedTimeText"]["runs"][0]["text"].as_s
+ published = decode_date(published_text.rchop(" (edited)"))
+
+ if published_text.includes?(" (edited)")
+ json.field "isEdited", true
+ else
+ json.field "isEdited", false
+ end
+
+ like_count = post["actionButtons"]["commentActionButtonsRenderer"]["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"]
+ .try &.as_s.gsub(/\D/, "").to_i? || 0
+
+ reply_count = short_text_to_number(post.dig?("actionButtons", "commentActionButtonsRenderer", "replyButton", "buttonRenderer", "text", "simpleText").try &.as_s || "0")
+
+ json.field "content", html_to_content(content_html)
+ json.field "contentHtml", content_html
+
+ json.field "published", published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
+
+ json.field "likeCount", like_count
+ json.field "replyCount", reply_count
+ json.field "commentId", post["postId"]? || post["commentId"]? || ""
+ json.field "authorIsChannelOwner", post["authorEndpoint"]["browseEndpoint"]["browseId"] == ucid
+
+ if attachment = post["backstageAttachment"]?
+ json.field "attachment" do
+ case attachment.as_h
+ when .has_key?("videoRenderer")
+ parse_item(attachment)
+ .as(SearchVideo)
+ .to_json(locale, json)
+ when .has_key?("backstageImageRenderer")
+ json.object do
+ attachment = attachment["backstageImageRenderer"]
+ json.field "type", "image"
+
+ json.field "imageThumbnails" do
+ json.array do
+ thumbnail = attachment["image"]["thumbnails"][0].as_h
+ width = thumbnail["width"].as_i
+ height = thumbnail["height"].as_i
+ aspect_ratio = (width.to_f / height.to_f)
+ url = thumbnail["url"].as_s.gsub(/=w\d+-h\d+(-p)?(-nd)?(-df)?(-rwa)?/, "=s640")
+
+ IMAGE_QUALITIES.each do |quality|
+ json.object do
+ json.field "url", url.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", (quality / aspect_ratio).ceil.to_i
+ end
+ end
+ end
+ end
+ end
+ when .has_key?("pollRenderer")
+ json.object do
+ attachment = attachment["pollRenderer"]
+ json.field "type", "poll"
+ json.field "totalVotes", short_text_to_number(attachment["totalVotes"]["simpleText"].as_s.split(" ")[0])
+ json.field "choices" do
+ json.array do
+ attachment["choices"].as_a.each do |choice|
+ json.object do
+ json.field "text", choice.dig("text", "runs", 0, "text").as_s
+ # A choice can have an image associated with it.
+ # Ex post: https://www.youtube.com/post/UgkxD4XavXUD4NQiddJXXdohbwOwcVqrH9Re
+ if choice["image"]?
+ thumbnail = choice["image"]["thumbnails"][0].as_h
+ width = thumbnail["width"].as_i
+ height = thumbnail["height"].as_i
+ aspect_ratio = (width.to_f / height.to_f)
+ url = thumbnail["url"].as_s.gsub(/=w\d+-h\d+(-p)?(-nd)?(-df)?(-rwa)?/, "=s640")
+ json.field "image" do
+ json.array do
+ IMAGE_QUALITIES.each do |quality|
+ json.object do
+ json.field "url", url.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", (quality / aspect_ratio).ceil.to_i
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ when .has_key?("postMultiImageRenderer")
+ json.object do
+ attachment = attachment["postMultiImageRenderer"]
+ json.field "type", "multiImage"
+ json.field "images" do
+ json.array do
+ attachment["images"].as_a.each do |image|
+ json.array do
+ thumbnail = image["backstageImageRenderer"]["image"]["thumbnails"][0].as_h
+ width = thumbnail["width"].as_i
+ height = thumbnail["height"].as_i
+ aspect_ratio = (width.to_f / height.to_f)
+ url = thumbnail["url"].as_s.gsub(/=w\d+-h\d+(-p)?(-nd)?(-df)?(-rwa)?/, "=s640")
+
+ IMAGE_QUALITIES.each do |quality|
+ json.object do
+ json.field "url", url.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", (quality / aspect_ratio).ceil.to_i
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ when .has_key?("playlistRenderer")
+ parse_item(attachment)
+ .as(SearchPlaylist)
+ .to_json(locale, json)
+ when .has_key?("quizRenderer")
+ json.object do
+ attachment = attachment["quizRenderer"]
+ json.field "type", "quiz"
+ json.field "totalVotes", short_text_to_number(attachment["totalVotes"]["simpleText"].as_s.split(" ")[0])
+ json.field "choices" do
+ json.array do
+ attachment["choices"].as_a.each do |choice|
+ json.object do
+ json.field "text", choice.dig("text", "runs", 0, "text").as_s
+ json.field "isCorrect", choice["isCorrect"].as_bool
+ end
+ end
+ end
+ end
+ end
+ else
+ json.object do
+ json.field "type", "unknown"
+ json.field "error", "Unrecognized attachment type."
+ end
+ end
+ end
+ end
+
+ if comments && (reply_count = (comments["backstageCommentsRenderer"]["moreText"]["simpleText"]? ||
+ comments["backstageCommentsRenderer"]["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
+ .try &.as_s.gsub(/\D/, "").to_i?)
+ continuation = comments["backstageCommentsRenderer"]["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
+ continuation ||= ""
+
+ json.field "replies" do
+ json.object do
+ json.field "replyCount", reply_count
+ json.field "continuation", extract_channel_community_cursor(continuation)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if !is_single_post
+ if cont = items.dig?(-1, "continuationItemRenderer", "continuationEndpoint", "continuationCommand", "token")
+ json.field "continuation", extract_channel_community_cursor(cont.as_s)
+ end
+ end
+ end
+ end
+
+ if format == "html"
+ response = JSON.parse(response)
+ content_html = IV::Frontend::Comments.template_youtube(response, locale, thin_mode)
+
+ response = JSON.build do |json|
+ json.object do
+ json.field "contentHtml", content_html
+ end
+ end
+ end
+
+ return response
+end
+
+def produce_channel_community_continuation(ucid, cursor)
+ object = {
+ "80226972:embedded" => {
+ "2:string" => ucid,
+ "3:string" => cursor || "",
+ },
+ }
+
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return continuation
+end
+
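+# Decodes a continuation token returned by YouTube and re-encodes only the inner
+# cursor object, which is the value exposed to clients as "continuation".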
+def extract_channel_community_cursor(continuation)
+ object = URI.decode_www_form(continuation)
+ .try { |i| Base64.decode(i) }
+ .try { |i| IO::Memory.new(i) }
+ .try { |i| Protodec::Any.parse(i) }
+ .try(&.["80226972:0:embedded"]["3:1:base64"].as_h)
+
+ if object["53:2:embedded"]?.try &.["3:0:embedded"]?
+ object["53:2:embedded"]["3:0:embedded"]["2:0:string"] = object["53:2:embedded"]["3:0:embedded"]
+ .try(&.["2:0:base64"].as_h)
+ .try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i, padding: false) }
+
+ object["53:2:embedded"]["3:0:embedded"].as_h.delete("2:0:base64")
+ end
+
+ cursor = Protodec::Any.cast_json(object)
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+
+ cursor
+end
diff --git a/src/invidious/channels/playlists.cr b/src/invidious/channels/playlists.cr
new file mode 100644
index 00000000..91029fe3
--- /dev/null
+++ b/src/invidious/channels/playlists.cr
@@ -0,0 +1,46 @@
+def fetch_channel_playlists(ucid, author, continuation, sort_by)
+ if continuation
+ initial_data = YoutubeAPI.browse(continuation)
+ else
+ params =
+ case sort_by
+ when "last", "last_added"
+ # Equivalent to "&sort=lad"
+ # {"2:string": "playlists", "3:varint": 4, "4:varint": 1, "6:varint": 1}
+ "EglwbGF5bGlzdHMYBCABMAE%3D"
+ when "oldest", "oldest_created"
+        # Formerly "&sort=da"
+        # This sort option may no longer be available upstream
+ # {"2:string": "playlists", "3:varint": 2, "4:varint": 1, "6:varint": 1}
+ "EglwbGF5bGlzdHMYAiABMAE%3D"
+ # {"2:string": "playlists", "3:varint": 1, "4:varint": 1, "6:varint": 1}
+ # "EglwbGF5bGlzdHMYASABMAE%3D"
+ when "newest", "newest_created"
+ # Formerly "&sort=dd"
+ # {"2:string": "playlists", "3:varint": 3, "4:varint": 1, "6:varint": 1}
+ "EglwbGF5bGlzdHMYAyABMAE%3D"
+ end
+
+ initial_data = YoutubeAPI.browse(ucid, params: params || "")
+ end
+
+ return extract_items(initial_data, author, ucid)
+end
+
+def fetch_channel_podcasts(ucid, author, continuation)
+ if continuation
+ initial_data = YoutubeAPI.browse(continuation)
+ else
+ initial_data = YoutubeAPI.browse(ucid, params: "Eghwb2RjYXN0c_IGBQoDugEA")
+ end
+ return extract_items(initial_data, author, ucid)
+end
+
+def fetch_channel_releases(ucid, author, continuation)
+ if continuation
+ initial_data = YoutubeAPI.browse(continuation)
+ else
+ initial_data = YoutubeAPI.browse(ucid, params: "EghyZWxlYXNlc_IGBQoDsgEA")
+ end
+ return extract_items(initial_data, author, ucid)
+end
diff --git a/src/invidious/channels/videos.cr b/src/invidious/channels/videos.cr
new file mode 100644
index 00000000..96400f47
--- /dev/null
+++ b/src/invidious/channels/videos.cr
@@ -0,0 +1,192 @@
+module Invidious::Channel::Tabs
+ extend self
+
+ # -------------------
+ # Regular videos
+ # -------------------
+
+ # Wrapper for AboutChannel, as we still need to call get_videos with
+  # an author name and ucid directly (e.g. in RSS feeds).
+ # TODO: figure out how to get rid of that
+ def get_videos(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+ return get_videos(
+ channel.author, channel.ucid,
+ continuation: continuation, sort_by: sort_by
+ )
+ end
+
+ # Wrapper for InvidiousChannel, as we still need to call get_videos with
+  # an author name and ucid directly (e.g. in RSS feeds).
+ # TODO: figure out how to get rid of that
+ def get_videos(channel : InvidiousChannel, *, continuation : String? = nil, sort_by = "newest")
+ return get_videos(
+ channel.author, channel.id,
+ continuation: continuation, sort_by: sort_by
+ )
+ end
+
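+  # e.g. `items, next_ctoken = get_videos("Author Name", "UC...")` returns the first
+  # page of videos plus the continuation token for the next page (nil when there are no more pages).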
+ def get_videos(author : String, ucid : String, *, continuation : String? = nil, sort_by = "newest")
+ continuation ||= make_initial_videos_ctoken(ucid, sort_by)
+ initial_data = YoutubeAPI.browse(continuation: continuation)
+
+ return extract_items(initial_data, author, ucid)
+ end
+
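+  # Fetches two consecutive pages of videos in a single call (hence "60"),
+  # concatenating the results.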
+ def get_60_videos(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+ if continuation.nil?
+      # Fetch the first "page" of videos
+ items, next_continuation = get_videos(channel, sort_by: sort_by)
+ else
+ # Fetch a "page" of videos using the given continuation token
+ items, next_continuation = get_videos(channel, continuation: continuation)
+ end
+
+ # If there is more to load, then load a second "page"
+ # and replace the previous continuation token
+ if !next_continuation.nil?
+ items_2, next_continuation = get_videos(channel, continuation: next_continuation)
+ items.concat items_2
+ end
+
+ return items, next_continuation
+ end
+
+ # -------------------
+ # Shorts
+ # -------------------
+
+ def get_shorts(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+ continuation ||= make_initial_shorts_ctoken(channel.ucid, sort_by)
+ initial_data = YoutubeAPI.browse(continuation: continuation)
+
+ return extract_items(initial_data, channel.author, channel.ucid)
+ end
+
+ # -------------------
+ # Livestreams
+ # -------------------
+
+ def get_livestreams(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+ continuation ||= make_initial_livestreams_ctoken(channel.ucid, sort_by)
+ initial_data = YoutubeAPI.browse(continuation: continuation)
+
+ return extract_items(initial_data, channel.author, channel.ucid)
+ end
+
+ def get_60_livestreams(channel : AboutChannel, *, continuation : String? = nil, sort_by = "newest")
+ if continuation.nil?
+      # Fetch the first "page" of streams
+ items, next_continuation = get_livestreams(channel, sort_by: sort_by)
+ else
+ # Fetch a "page" of streams using the given continuation token
+ items, next_continuation = get_livestreams(channel, continuation: continuation)
+ end
+
+ # If there is more to load, then load a second "page"
+ # and replace the previous continuation token
+ if !next_continuation.nil?
+ items_2, next_continuation = get_livestreams(channel, continuation: next_continuation)
+ items.concat items_2
+ end
+
+ return items, next_continuation
+ end
+
+ # -------------------
+ # C-tokens
+ # -------------------
+
+ private def sort_options_videos_short(sort_by : String)
+ case sort_by
+ when "newest" then return 4_i64
+ when "popular" then return 2_i64
+ when "oldest" then return 5_i64
+ else return 4_i64 # Fallback to "newest"
+ end
+ end
+
+ # Generate the initial "continuation token" to get the first page of the
+ # "videos" tab. The following page requires the ctoken provided in that
+ # first page, and so on.
+ private def make_initial_videos_ctoken(ucid : String, sort_by = "newest")
+ object = {
+ "15:embedded" => {
+ "2:embedded" => {
+ "1:string" => "00000000-0000-0000-0000-000000000000",
+ },
+ "4:varint" => sort_options_videos_short(sort_by),
+ },
+ }
+
+ return channel_ctoken_wrap(ucid, object)
+ end
+
+ # Generate the initial "continuation token" to get the first page of the
+ # "shorts" tab. The following page requires the ctoken provided in that
+ # first page, and so on.
+ private def make_initial_shorts_ctoken(ucid : String, sort_by = "newest")
+ object = {
+ "10:embedded" => {
+ "2:embedded" => {
+ "1:string" => "00000000-0000-0000-0000-000000000000",
+ },
+ "4:varint" => sort_options_videos_short(sort_by),
+ },
+ }
+
+ return channel_ctoken_wrap(ucid, object)
+ end
+
+ # Generate the initial "continuation token" to get the first page of the
+ # "livestreams" tab. The following page requires the ctoken provided in that
+ # first page, and so on.
+ private def make_initial_livestreams_ctoken(ucid : String, sort_by = "newest")
+ sort_by_numerical =
+ case sort_by
+ when "newest" then 12_i64
+ when "popular" then 14_i64
+ when "oldest" then 13_i64
+ else 12_i64 # Fallback to "newest"
+ end
+
+ object = {
+ "14:embedded" => {
+ "2:embedded" => {
+ "1:string" => "00000000-0000-0000-0000-000000000000",
+ },
+ "5:varint" => sort_by_numerical,
+ },
+ }
+
+ return channel_ctoken_wrap(ucid, object)
+ end
+
+ # The protobuf structure common between videos/shorts/livestreams
+ private def channel_ctoken_wrap(ucid : String, object)
+ object_inner = {
+ "110:embedded" => {
+ "3:embedded" => object,
+ },
+ }
+
+ object_inner_encoded = object_inner
+ .try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ object = {
+ "80226972:embedded" => {
+ "2:string" => ucid,
+ "3:string" => object_inner_encoded,
+ },
+ }
+
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return continuation
+ end
+end
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
deleted file mode 100644
index 81d6ac2b..00000000
--- a/src/invidious/comments.cr
+++ /dev/null
@@ -1,662 +0,0 @@
-class RedditThing
- include JSON::Serializable
-
- property kind : String
- property data : RedditComment | RedditLink | RedditMore | RedditListing
-end
-
-class RedditComment
- include JSON::Serializable
-
- property author : String
- property body_html : String
- property replies : RedditThing | String
- property score : Int32
- property depth : Int32
- property permalink : String
-
- @[JSON::Field(converter: RedditComment::TimeConverter)]
- property created_utc : Time
-
- module TimeConverter
- def self.from_json(value : JSON::PullParser) : Time
- Time.unix(value.read_float.to_i)
- end
-
- def self.to_json(value : Time, json : JSON::Builder)
- json.number(value.to_unix)
- end
- end
-end
-
-struct RedditLink
- include JSON::Serializable
-
- property author : String
- property score : Int32
- property subreddit : String
- property num_comments : Int32
- property id : String
- property permalink : String
- property title : String
-end
-
-struct RedditMore
- include JSON::Serializable
-
- property children : Array(String)
- property count : Int32
- property depth : Int32
-end
-
-class RedditListing
- include JSON::Serializable
-
- property children : Array(RedditThing)
- property modhash : String
-end
-
-def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, sort_by = "top", action = "action_get_comments")
- video = get_video(id, db, region: region)
- session_token = video.session_token
-
- case cursor
- when nil, ""
- ctoken = produce_comment_continuation(id, cursor: "", sort_by: sort_by)
- # when .starts_with? "Ug"
- # ctoken = produce_comment_reply_continuation(id, video.ucid, cursor)
- when .starts_with? "ADSJ"
- ctoken = produce_comment_continuation(id, cursor: cursor, sort_by: sort_by)
- else
- ctoken = cursor
- end
-
- if !session_token
- if format == "json"
- return {"comments" => [] of String}.to_json
- else
- return {"contentHtml" => "", "commentCount" => 0}.to_json
- end
- end
-
- post_req = {
- page_token: ctoken,
- session_token: session_token,
- }
-
- headers = HTTP::Headers{
- "cookie" => video.cookie,
- }
-
- response = YT_POOL.client(region, &.post("/comment_service_ajax?#{action}=1&hl=en&gl=US&pbj=1", headers, form: post_req))
- response = JSON.parse(response.body)
-
- # For some reason youtube puts it in an array for comment_replies but otherwise it's the same
- if action == "action_get_comment_replies"
- response = response[1]
- end
-
- if !response["response"]["continuationContents"]?
- raise InfoException.new("Could not fetch comments")
- end
-
- response = response["response"]["continuationContents"]
- if response["commentRepliesContinuation"]?
- body = response["commentRepliesContinuation"]
- else
- body = response["itemSectionContinuation"]
- end
-
- contents = body["contents"]?
- if !contents
- if format == "json"
- return {"comments" => [] of String}.to_json
- else
- return {"contentHtml" => "", "commentCount" => 0}.to_json
- end
- end
-
- response = JSON.build do |json|
- json.object do
- if body["header"]?
- count_text = body["header"]["commentsHeaderRenderer"]["countText"]
- comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
- .try &.as_s.gsub(/\D/, "").to_i? || 0
-
- json.field "commentCount", comment_count
- end
-
- json.field "videoId", id
-
- json.field "comments" do
- json.array do
- contents.as_a.each do |node|
- json.object do
- if !response["commentRepliesContinuation"]?
- node = node["commentThreadRenderer"]
- end
-
- if node["replies"]?
- node_replies = node["replies"]["commentRepliesRenderer"]
- end
-
- if !response["commentRepliesContinuation"]?
- node_comment = node["comment"]["commentRenderer"]
- else
- node_comment = node["commentRenderer"]
- end
-
- content_html = node_comment["contentText"]?.try { |t| parse_content(t) } || ""
- author = node_comment["authorText"]?.try &.["simpleText"]? || ""
-
- json.field "author", author
- json.field "authorThumbnails" do
- json.array do
- node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
- json.object do
- json.field "url", thumbnail["url"]
- json.field "width", thumbnail["width"]
- json.field "height", thumbnail["height"]
- end
- end
- end
- end
-
- if node_comment["authorEndpoint"]?
- json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
- json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
- else
- json.field "authorId", ""
- json.field "authorUrl", ""
- end
-
- published_text = node_comment["publishedTimeText"]["runs"][0]["text"].as_s
- published = decode_date(published_text.rchop(" (edited)"))
-
- if published_text.includes?(" (edited)")
- json.field "isEdited", true
- else
- json.field "isEdited", false
- end
-
- json.field "content", html_to_content(content_html)
- json.field "contentHtml", content_html
-
- json.field "published", published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
-
- comment_action_buttons_renderer = node_comment["actionButtons"]["commentActionButtonsRenderer"]
-
- json.field "likeCount", comment_action_buttons_renderer["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"].as_s.scan(/\d/).map(&.[0]).join.to_i
- json.field "commentId", node_comment["commentId"]
- json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]
-
- if comment_action_buttons_renderer["creatorHeart"]?
- hearth_data = comment_action_buttons_renderer["creatorHeart"]["creatorHeartRenderer"]["creatorThumbnail"]
- json.field "creatorHeart" do
- json.object do
- json.field "creatorThumbnail", hearth_data["thumbnails"][-1]["url"]
- json.field "creatorName", hearth_data["accessibility"]["accessibilityData"]["label"]
- end
- end
- end
-
- if node_replies && !response["commentRepliesContinuation"]?
- if node_replies["moreText"]?
- reply_count = (node_replies["moreText"]["simpleText"]? || node_replies["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
- .try &.as_s.gsub(/\D/, "").to_i? || 1
- elsif node_replies["viewReplies"]?
- reply_count = node_replies["viewReplies"]["buttonRenderer"]["text"]?.try &.["runs"][1]?.try &.["text"]?.try &.as_s.to_i? || 1
- else
- reply_count = 1
- end
-
- continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
- continuation ||= ""
-
- json.field "replies" do
- json.object do
- json.field "replyCount", reply_count
- json.field "continuation", continuation
- end
- end
- end
- end
- end
- end
- end
-
- if body["continuations"]?
- continuation = body["continuations"][0]["nextContinuationData"]["continuation"].as_s
- json.field "continuation", continuation
- end
- end
- end
-
- if format == "html"
- response = JSON.parse(response)
- content_html = template_youtube_comments(response, locale, thin_mode, action == "action_get_comment_replies")
-
- response = JSON.build do |json|
- json.object do
- json.field "contentHtml", content_html
-
- if response["commentCount"]?
- json.field "commentCount", response["commentCount"]
- else
- json.field "commentCount", 0
- end
- end
- end
- end
-
- return response
-end
-
-def fetch_reddit_comments(id, sort_by = "confidence")
- client = make_client(REDDIT_URL)
- headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}
-
- # TODO: Use something like #479 for a static list of instances to use here
- query = "(url:3D#{id}%20OR%20url:#{id})%20(site:invidio.us%20OR%20site:youtube.com%20OR%20site:youtu.be)"
- search_results = client.get("/search.json?q=#{query}", headers)
-
- if search_results.status_code == 200
- search_results = RedditThing.from_json(search_results.body)
-
- # For videos that have more than one thread, choose the one with the highest score
- thread = search_results.data.as(RedditListing).children.sort_by { |child| child.data.as(RedditLink).score }[-1]
- thread = thread.data.as(RedditLink)
-
- result = client.get("/r/#{thread.subreddit}/comments/#{thread.id}.json?limit=100&sort=#{sort_by}", headers).body
- result = Array(RedditThing).from_json(result)
- elsif search_results.status_code == 302
- # Previously, if there was only one result then the API would redirect to that result.
- # Now, it appears it will still return a listing so this section is likely unnecessary.
-
- result = client.get(search_results.headers["Location"], headers).body
- result = Array(RedditThing).from_json(result)
-
- thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
- else
- raise InfoException.new("Could not fetch comments")
- end
-
- client.close
-
- comments = result[1].data.as(RedditListing).children
- return comments, thread
-end
-
-def template_youtube_comments(comments, locale, thin_mode, is_replies = false)
- String.build do |html|
- root = comments["comments"].as_a
- root.each do |child|
- if child["replies"]?
- replies_html = <<-END_HTML
- <div id="replies" class="pure-g">
- <div class="pure-u-1-24"></div>
- <div class="pure-u-23-24">
- <p>
- <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
- data-onclick="get_youtube_replies" data-load-replies>#{translate(locale, "View `x` replies", number_with_separator(child["replies"]["replyCount"]))}</a>
- </p>
- </div>
- </div>
- END_HTML
- end
-
- if !thin_mode
- author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).request_target}"
- else
- author_thumbnail = ""
- end
-
- html << <<-END_HTML
- <div class="pure-g" style="width:100%">
- <div class="channel-profile pure-u-4-24 pure-u-md-2-24">
- <img style="padding-right:1em;padding-top:1em;width:90%" src="#{author_thumbnail}">
- </div>
- <div class="pure-u-20-24 pure-u-md-22-24">
- <p>
- <b>
- <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{child["author"]}</a>
- </b>
- <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
- END_HTML
-
- if child["attachment"]?
- attachment = child["attachment"]
-
- case attachment["type"]
- when "image"
- attachment = attachment["imageThumbnails"][1]
-
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1 pure-u-md-1-2">
- <img style="width:100%" src="/ggpht#{URI.parse(attachment["url"].as_s).request_target}">
- </div>
- </div>
- END_HTML
- when "video"
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1 pure-u-md-1-2">
- <div style="position:relative;width:100%;height:0;padding-bottom:56.25%;margin-bottom:5px">
- END_HTML
-
- if attachment["error"]?
- html << <<-END_HTML
- <p>#{attachment["error"]}</p>
- END_HTML
- else
- html << <<-END_HTML
- <iframe id='ivplayer' style='position:absolute;width:100%;height:100%;left:0;top:0' src='/embed/#{attachment["videoId"]?}?autoplay=0' style='border:none;'></iframe>
- END_HTML
- end
-
- html << <<-END_HTML
- </div>
- </div>
- </div>
- END_HTML
- else nil # Ignore
- end
- end
-
- html << <<-END_HTML
- <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
- |
- END_HTML
-
- if comments["videoId"]?
- html << <<-END_HTML
- <a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
- |
- END_HTML
- elsif comments["authorId"]?
- html << <<-END_HTML
- <a href="https://www.youtube.com/channel/#{comments["authorId"]}/community?lb=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
- |
- END_HTML
- end
-
- html << <<-END_HTML
- <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
- END_HTML
-
- if child["creatorHeart"]?
- if !thin_mode
- creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).request_target}"
- else
- creator_thumbnail = ""
- end
-
- html << <<-END_HTML
- <span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a ❤", child["creatorHeart"]["creatorName"].as_s)}">
- <div class="creator-heart">
- <img class="creator-heart-background-hearted" src="#{creator_thumbnail}"></img>
- <div class="creator-heart-small-hearted">
- <div class="icon ion-ios-heart creator-heart-small-container"></div>
- </div>
- </div>
- </span>
- END_HTML
- end
-
- html << <<-END_HTML
- </p>
- #{replies_html}
- </div>
- </div>
- END_HTML
- end
-
- if comments["continuation"]?
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1">
- <p>
- <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
- data-onclick="get_youtube_replies" data-load-more #{"data-load-replies" if is_replies}>#{translate(locale, "Load more")}</a>
- </p>
- </div>
- </div>
- END_HTML
- end
- end
-end
-
-def template_reddit_comments(root, locale)
- String.build do |html|
- root.each do |child|
- if child.data.is_a?(RedditComment)
- child = child.data.as(RedditComment)
- body_html = HTML.unescape(child.body_html)
-
- replies_html = ""
- if child.replies.is_a?(RedditThing)
- replies = child.replies.as(RedditThing)
- replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
- end
-
- if child.depth > 0
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1-24">
- </div>
- <div class="pure-u-23-24">
- END_HTML
- else
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1">
- END_HTML
- end
-
- html << <<-END_HTML
- <p>
- <a href="javascript:void(0)" data-onclick="toggle_parent">[ - ]</a>
- <b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
- #{translate(locale, "`x` points", number_with_separator(child.score))}
- <span title="#{child.created_utc.to_s(translate(locale, "%a %B %-d %T %Y UTC"))}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
- <a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
- </p>
- <div>
- #{body_html}
- #{replies_html}
- </div>
- </div>
- </div>
- END_HTML
- end
- end
- end
-end
-
-def replace_links(html)
- html = XML.parse_html(html)
-
- html.xpath_nodes(%q(//a)).each do |anchor|
- url = URI.parse(anchor["href"])
-
- if {"www.youtube.com", "m.youtube.com", "youtu.be"}.includes?(url.host)
- if url.path == "/redirect"
- params = HTTP::Params.parse(url.query.not_nil!)
- anchor["href"] = params["q"]?
- else
- anchor["href"] = url.request_target
- end
- elsif url.to_s == "#"
- begin
- length_seconds = decode_length_seconds(anchor.content)
- rescue ex
- length_seconds = decode_time(anchor.content)
- end
-
- if length_seconds > 0
- anchor["href"] = "javascript:void(0)"
- anchor["onclick"] = "player.currentTime(#{length_seconds})"
- else
- anchor["href"] = url.request_target
- end
- end
- end
-
- html = html.xpath_node(%q(//body)).not_nil!
- if node = html.xpath_node(%q(./p))
- html = node
- end
-
- return html.to_xml(options: XML::SaveOptions::NO_DECL)
-end
-
-def fill_links(html, scheme, host)
- html = XML.parse_html(html)
-
- html.xpath_nodes("//a").each do |match|
- url = URI.parse(match["href"])
- # Reddit links don't have host
- if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
- url.scheme = scheme
- url.host = host
- match["href"] = url
- end
- end
-
- if host == "www.youtube.com"
- html = html.xpath_node(%q(//body/p)).not_nil!
- end
-
- return html.to_xml(options: XML::SaveOptions::NO_DECL)
-end
-
-def parse_content(content : JSON::Any) : String
- content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
- content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
-end
-
-def content_to_comment_html(content)
- comment_html = content.map do |run|
- text = HTML.escape(run["text"].as_s).gsub("\n", "<br>")
-
- if run["bold"]?
- text = "<b>#{text}</b>"
- end
-
- if run["italics"]?
- text = "<i>#{text}</i>"
- end
-
- if run["navigationEndpoint"]?
- if url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
- url = URI.parse(url)
-
- if !url.host || {"m.youtube.com", "www.youtube.com", "youtu.be"}.includes? url.host
- if url.path == "/redirect"
- url = HTTP::Params.parse(url.query.not_nil!)["q"]
- else
- url = url.request_target
- end
- end
-
- text = %(<a href="#{url}">#{text}</a>)
- elsif watch_endpoint = run["navigationEndpoint"]["watchEndpoint"]?
- length_seconds = watch_endpoint["startTimeSeconds"]?
- video_id = watch_endpoint["videoId"].as_s
-
- if length_seconds && length_seconds.as_i > 0
- text = %(<a href="javascript:void(0)" data-onclick="jump_to_time" data-jump-time="#{length_seconds}">#{text}</a>)
- else
- text = %(<a href="/watch?v=#{video_id}">#{text}</a>)
- end
- elsif url = run["navigationEndpoint"]["commandMetadata"]?.try &.["webCommandMetadata"]["url"].as_s
- text = %(<a href="#{url}">#{text}</a>)
- end
- end
-
- text
- end.join("").delete('\ufeff')
-
- return comment_html
-end
-
-def produce_comment_continuation(video_id, cursor = "", sort_by = "top")
- object = {
- "2:embedded" => {
- "2:string" => video_id,
- "25:varint" => 0_i64,
- "28:varint" => 1_i64,
- "36:embedded" => {
- "5:varint" => -1_i64,
- "8:varint" => 0_i64,
- },
- "40:embedded" => {
- "1:varint" => 4_i64,
- "3:string" => "https://www.youtube.com",
- "4:string" => "",
- },
- },
- "3:varint" => 6_i64,
- "6:embedded" => {
- "1:string" => cursor,
- "4:embedded" => {
- "4:string" => video_id,
- "6:varint" => 0_i64,
- },
- "5:varint" => 20_i64,
- },
- }
-
- case sort_by
- when "top"
- object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
- when "new", "newest"
- object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 1_i64
- else # top
- object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
- end
-
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return continuation
-end
-
-def produce_comment_reply_continuation(video_id, ucid, comment_id)
- object = {
- "2:embedded" => {
- "2:string" => video_id,
- "24:varint" => 1_i64,
- "25:varint" => 1_i64,
- "28:varint" => 1_i64,
- "36:embedded" => {
- "5:varint" => -1_i64,
- "8:varint" => 0_i64,
- },
- },
- "3:varint" => 6_i64,
- "6:embedded" => {
- "3:embedded" => {
- "2:string" => comment_id,
- "4:embedded" => {
- "1:varint" => 0_i64,
- },
- "5:string" => ucid,
- "6:string" => video_id,
- "8:varint" => 1_i64,
- "9:varint" => 10_i64,
- },
- },
- }
-
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return continuation
-end
diff --git a/src/invidious/comments/content.cr b/src/invidious/comments/content.cr
new file mode 100644
index 00000000..1f55bfe6
--- /dev/null
+++ b/src/invidious/comments/content.cr
@@ -0,0 +1,89 @@
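+# Converts plain text into a YouTube-style "runs" object, turning bare URLs into
+# navigationEndpoint nodes. For example:
+#   text_to_parsed_content("see https://example.com")
+#   # => {"runs" => [{"text" => "see "}, {"text" => "https://example.com", "navigationEndpoint" => ...}, {"text" => "\n"}]}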
+def text_to_parsed_content(text : String) : JSON::Any
+ nodes = [] of JSON::Any
+  # Convert each line into an array of nodes
+ text.split('\n').each do |line|
+    # Start with the whole line as a single simple node, before
+    # checking for patterns inside the line:
+ # { 'text': line }
+ current_nodes = [] of JSON::Any
+ initial_node = {"text" => line}
+ current_nodes << (JSON.parse(initial_node.to_json))
+
+    # For each URL match, split the last node and create a new node carrying
+    # the URL information:
+ # { 'text': match, 'navigationEndpoint': { 'urlEndpoint' : 'url': match } }
+ line.scan(/https?:\/\/[^ ]*/).each do |url_match|
+      # Retrieve the last node and remove the matched URL from its text
+ last_node = current_nodes[-1].as_h
+ splitted_last_node = last_node["text"].as_s.split(url_match[0])
+ last_node["text"] = JSON.parse(splitted_last_node[0].to_json)
+ current_nodes[-1] = JSON.parse(last_node.to_json)
+      # Create a new node with the match and its navigation info
+ current_node = {"text" => url_match[0], "navigationEndpoint" => {"urlEndpoint" => {"url" => url_match[0]}}}
+ current_nodes << (JSON.parse(current_node.to_json))
+      # If any text remains after the match, create a new simple node for it
+ after_node = {"text" => splitted_last_node.size > 1 ? splitted_last_node[1] : ""}
+ current_nodes << (JSON.parse(after_node.to_json))
+ end
+
+    # After processing the matches inside the line,
+    # append \n to the last node to preserve the line break
+ last_node = current_nodes[-1].as_h
+ last_node["text"] = JSON.parse("#{last_node["text"]}\n".to_json)
+ current_nodes[-1] = JSON.parse(last_node.to_json)
+
+    # Finally, append this line's nodes to the list of returned nodes
+ current_nodes.each do |node|
+ nodes << (node)
+ end
+ end
+ return JSON.parse({"runs" => nodes}.to_json)
+end
+
+def parse_content(content : JSON::Any, video_id : String? = "") : String
+ content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
+ content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r, video_id).try &.to_s.gsub("\n", "<br>") } || ""
+end
+
+def content_to_comment_html(content, video_id : String? = "")
+ html_array = content.map do |run|
+ # Sometimes, there is an empty element.
+ # See: https://github.com/iv-org/invidious/issues/3096
+ next if run.as_h.empty?
+
+ text = HTML.escape(run["text"].as_s)
+
+ if navigation_endpoint = run.dig?("navigationEndpoint")
+ text = parse_link_endpoint(navigation_endpoint, text, video_id)
+ end
+
+ text = "<b>#{text}</b>" if run["bold"]?
+ text = "<s>#{text}</s>" if run["strikethrough"]?
+ text = "<i>#{text}</i>" if run["italics"]?
+
+    # Check for custom emojis
+ if run["emoji"]?
+ if run["emoji"]["isCustomEmoji"]?.try &.as_bool
+ if emoji_image = run.dig?("emoji", "image")
+ emoji_alt = emoji_image.dig?("accessibility", "accessibilityData", "label").try &.as_s || text
+ emoji_thumb = emoji_image["thumbnails"][0]
+ text = String.build do |str|
+ str << %(<img alt=") << emoji_alt << "\" "
+ str << %(src="/ggpht) << URI.parse(emoji_thumb["url"].as_s).request_target << "\" "
+ str << %(title=") << emoji_alt << "\" "
+ str << %(width=") << emoji_thumb["width"] << "\" "
+ str << %(height=") << emoji_thumb["height"] << "\" "
+ str << %(class="channel-emoji" />)
+ end
+ else
+ # Hide deleted channel emoji
+ text = ""
+ end
+ end
+ end
+
+ text
+ end
+
+ return html_array.join("").delete('\ufeff')
+end
diff --git a/src/invidious/comments/links_util.cr b/src/invidious/comments/links_util.cr
new file mode 100644
index 00000000..f89b86d3
--- /dev/null
+++ b/src/invidious/comments/links_util.cr
@@ -0,0 +1,76 @@
+module Invidious::Comments
+ extend self
+
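+  # Rewrites anchors in comment HTML: YouTube / youtu.be links become relative
+  # links, and timestamp ("#") anchors become player seek actions.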
+ def replace_links(html)
+ # Check if the document is empty
+ # Prevents edge-case bug with Reddit comments, see issue #3115
+ if html.nil? || html.empty?
+ return html
+ end
+
+ html = XML.parse_html(html)
+
+ html.xpath_nodes(%q(//a)).each do |anchor|
+ url = URI.parse(anchor["href"])
+
+ if url.host.nil? || url.host.not_nil!.ends_with?("youtube.com") || url.host.not_nil!.ends_with?("youtu.be")
+ if url.host.try &.ends_with? "youtu.be"
+        anchor["href"] = "/watch?v=#{url.path.lstrip('/')}&#{url.query_params}"
+ else
+ if url.path == "/redirect"
+ params = HTTP::Params.parse(url.query.not_nil!)
+ anchor["href"] = params["q"]?
+ else
+ anchor["href"] = url.request_target
+ end
+ end
+ elsif url.to_s == "#"
+ begin
+ length_seconds = decode_length_seconds(anchor.content)
+ rescue ex
+ length_seconds = decode_time(anchor.content)
+ end
+
+ if length_seconds > 0
+ anchor["href"] = "javascript:void(0)"
+ anchor["onclick"] = "player.currentTime(#{length_seconds})"
+ else
+ anchor["href"] = url.request_target
+ end
+ end
+ end
+
+ html = html.xpath_node(%q(//body)).not_nil!
+ if node = html.xpath_node(%q(./p))
+ html = node
+ end
+
+ return html.to_xml(options: XML::SaveOptions::NO_DECL)
+ end
+
+ def fill_links(html, scheme, host)
+ # Check if the document is empty
+ # Prevents edge-case bug with Reddit comments, see issue #3115
+ if html.nil? || html.empty?
+ return html
+ end
+
+ html = XML.parse_html(html)
+
+ html.xpath_nodes("//a").each do |match|
+ url = URI.parse(match["href"])
+      # Reddit links don't have a host
+ if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
+ url.scheme = scheme
+ url.host = host
+ match["href"] = url
+ end
+ end
+
+ if host == "www.youtube.com"
+ html = html.xpath_node(%q(//body/p)).not_nil!
+ end
+
+ return html.to_xml(options: XML::SaveOptions::NO_DECL)
+ end
+end
diff --git a/src/invidious/comments/reddit.cr b/src/invidious/comments/reddit.cr
new file mode 100644
index 00000000..ba9c19f1
--- /dev/null
+++ b/src/invidious/comments/reddit.cr
@@ -0,0 +1,41 @@
+module Invidious::Comments
+ extend self
+
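+  # Searches Reddit for threads linking to the given video id and returns the
+  # comments of the highest-scored thread, along with the thread itself.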
+ def fetch_reddit(id, sort_by = "confidence")
+ client = make_client(REDDIT_URL)
+ headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}
+
+ # TODO: Use something like #479 for a static list of instances to use here
+ query = URI::Params.encode({q: "(url:3D#{id} OR url:#{id}) AND (site:invidio.us OR site:youtube.com OR site:youtu.be)"})
+ search_results = client.get("/search.json?#{query}", headers)
+
+ if search_results.status_code == 200
+ search_results = RedditThing.from_json(search_results.body)
+
+ # For videos that have more than one thread, choose the one with the highest score
+ threads = search_results.data.as(RedditListing).children
+ thread = threads.max_by?(&.data.as(RedditLink).score).try(&.data.as(RedditLink))
+ result = thread.try do |t|
+ body = client.get("/r/#{t.subreddit}/comments/#{t.id}.json?limit=100&sort=#{sort_by}", headers).body
+ Array(RedditThing).from_json(body)
+ end
+ result ||= [] of RedditThing
+ elsif search_results.status_code == 302
+ # Previously, if there was only one result then the API would redirect to that result.
+ # Now, it appears it will still return a listing so this section is likely unnecessary.
+
+ result = client.get(search_results.headers["Location"], headers).body
+ result = Array(RedditThing).from_json(result)
+
+ thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
+ else
+ raise NotFoundException.new("Comments not found.")
+ end
+
+ client.close
+
+ comments = result[1]?.try(&.data.as(RedditListing).children)
+ comments ||= [] of RedditThing
+ return comments, thread
+ end
+end
diff --git a/src/invidious/comments/reddit_types.cr b/src/invidious/comments/reddit_types.cr
new file mode 100644
index 00000000..796a1183
--- /dev/null
+++ b/src/invidious/comments/reddit_types.cr
@@ -0,0 +1,57 @@
+class RedditThing
+ include JSON::Serializable
+
+ property kind : String
+ property data : RedditComment | RedditLink | RedditMore | RedditListing
+end
+
+class RedditComment
+ include JSON::Serializable
+
+ property author : String
+ property body_html : String
+ property replies : RedditThing | String
+ property score : Int32
+ property depth : Int32
+ property permalink : String
+
+ @[JSON::Field(converter: RedditComment::TimeConverter)]
+ property created_utc : Time
+
+ module TimeConverter
+ def self.from_json(value : JSON::PullParser) : Time
+ Time.unix(value.read_float.to_i)
+ end
+
+ def self.to_json(value : Time, json : JSON::Builder)
+ json.number(value.to_unix)
+ end
+ end
+end
+
+struct RedditLink
+ include JSON::Serializable
+
+ property author : String
+ property score : Int32
+ property subreddit : String
+ property num_comments : Int32
+ property id : String
+ property permalink : String
+ property title : String
+end
+
+struct RedditMore
+ include JSON::Serializable
+
+ property children : Array(String)
+ property count : Int32
+ property depth : Int32
+end
+
+class RedditListing
+ include JSON::Serializable
+
+ property children : Array(RedditThing)
+ property modhash : String
+end
diff --git a/src/invidious/comments/youtube.cr b/src/invidious/comments/youtube.cr
new file mode 100644
index 00000000..0716fcde
--- /dev/null
+++ b/src/invidious/comments/youtube.cr
@@ -0,0 +1,365 @@
+module Invidious::Comments
+ extend self
+
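+  # Fetches one page of YouTube comments for the given video through
+  # `YoutubeAPI.next` and renders it as JSON or HTML via `parse_youtube`.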
+ def fetch_youtube(id, cursor, format, locale, thin_mode, region, sort_by = "top")
+ case cursor
+ when nil, ""
+ ctoken = Comments.produce_continuation(id, cursor: "", sort_by: sort_by)
+ when .starts_with? "ADSJ"
+ ctoken = Comments.produce_continuation(id, cursor: cursor, sort_by: sort_by)
+ else
+ ctoken = cursor
+ end
+
+ client_config = YoutubeAPI::ClientConfig.new(region: region)
+ response = YoutubeAPI.next(continuation: ctoken, client_config: client_config)
+ return parse_youtube(id, response, format, locale, thin_mode, sort_by)
+ end
+
+ def fetch_community_post_comments(ucid, post_id)
+ object = {
+ "2:string" => "community",
+ "25:embedded" => {
+ "22:string" => post_id,
+ },
+ "45:embedded" => {
+ "2:varint" => 1_i64,
+ "3:varint" => 1_i64,
+ },
+ "53:embedded" => {
+ "4:embedded" => {
+ "6:varint" => 0_i64,
+ "27:varint" => 1_i64,
+ "29:string" => post_id,
+ "30:string" => ucid,
+ },
+ "8:string" => "comments-section",
+ },
+ }
+
+ object_parsed = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+
+ object2 = {
+ "80226972:embedded" => {
+ "2:string" => ucid,
+ "3:string" => object_parsed,
+ },
+ }
+
+ continuation = object2.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ initial_data = YoutubeAPI.browse(continuation: continuation)
+ return initial_data
+ end
+
+ def parse_youtube(id, response, format, locale, thin_mode, sort_by = "top", is_post = false)
+ contents = nil
+
+ if on_response_received_endpoints = response["onResponseReceivedEndpoints"]?
+ header = nil
+ on_response_received_endpoints.as_a.each do |item|
+ if item["reloadContinuationItemsCommand"]?
+ case item["reloadContinuationItemsCommand"]["slot"]
+ when "RELOAD_CONTINUATION_SLOT_HEADER"
+ header = item["reloadContinuationItemsCommand"]["continuationItems"][0]
+ when "RELOAD_CONTINUATION_SLOT_BODY"
+            # continuationItems is nil when the video has no comments
+ contents = item["reloadContinuationItemsCommand"]["continuationItems"]?
+ end
+ elsif item["appendContinuationItemsAction"]?
+ contents = item["appendContinuationItemsAction"]["continuationItems"]
+ end
+ end
+ elsif response["continuationContents"]?
+ response = response["continuationContents"]
+ if response["commentRepliesContinuation"]?
+ body = response["commentRepliesContinuation"]
+ else
+ body = response["itemSectionContinuation"]
+ end
+ contents = body["contents"]?
+ header = body["header"]?
+ else
+ raise NotFoundException.new("Comments not found.")
+ end
+
+ if !contents
+ if format == "json"
+ return {"comments" => [] of String}.to_json
+ else
+ return {"contentHtml" => "", "commentCount" => 0}.to_json
+ end
+ end
+
+ continuation_item_renderer = nil
+ contents.as_a.reject! do |item|
+ if item["continuationItemRenderer"]?
+ continuation_item_renderer = item["continuationItemRenderer"]
+ true
+ end
+ end
+
+ mutations = response.dig?("frameworkUpdates", "entityBatchUpdate", "mutations").try &.as_a || [] of JSON::Any
+
+ response = JSON.build do |json|
+ json.object do
+ if header
+ count_text = header["commentsHeaderRenderer"]["countText"]
+ comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
+ .try &.as_s.gsub(/\D/, "").to_i? || 0
+ json.field "commentCount", comment_count
+ end
+
+ if is_post
+ json.field "postId", id
+ else
+ json.field "videoId", id
+ end
+
+ json.field "comments" do
+ json.array do
+ contents.as_a.each do |node|
+ json.object do
+ if node["commentThreadRenderer"]?
+ node = node["commentThreadRenderer"]
+ end
+
+ if node["replies"]?
+ node_replies = node["replies"]["commentRepliesRenderer"]
+ end
+
+ if cvm = node["commentViewModel"]?
+                  # Two nested commentViewModels on the initial request;
+                  # a single commentViewModel when fetching replies to a comment
+ cvm = cvm["commentViewModel"] if cvm["commentViewModel"]?
+
+ comment_key = cvm["commentKey"]
+ toolbar_key = cvm["toolbarStateKey"]
+ comment_mutation = mutations.find { |i| i.dig?("payload", "commentEntityPayload", "key") == comment_key }
+ toolbar_mutation = mutations.find { |i| i.dig?("entityKey") == toolbar_key }
+
+ if !comment_mutation.nil? && !toolbar_mutation.nil?
+                    # TODO: parse styleRuns, commandRuns and attachmentRuns for comments
+ html_content = parse_description(comment_mutation.dig("payload", "commentEntityPayload", "properties", "content"), id)
+ comment_author = comment_mutation.dig("payload", "commentEntityPayload", "author")
+ json.field "authorId", comment_author["channelId"].as_s
+ json.field "authorUrl", "/channel/#{comment_author["channelId"].as_s}"
+ json.field "author", comment_author["displayName"].as_s
+ json.field "verified", comment_author["isVerified"].as_bool
+ json.field "authorThumbnails" do
+ json.array do
+ comment_mutation.dig?("payload", "commentEntityPayload", "avatar", "image", "sources").try &.as_a.each do |thumbnail|
+ json.object do
+ json.field "url", thumbnail["url"]
+ json.field "width", thumbnail["width"]
+ json.field "height", thumbnail["height"]
+ end
+ end
+ end
+ end
+
+ json.field "authorIsChannelOwner", comment_author["isCreator"].as_bool
+ json.field "isSponsor", (comment_author["sponsorBadgeUrl"]? != nil)
+
+ if sponsor_badge_url = comment_author["sponsorBadgeUrl"]?
+ # Sponsor icon thumbnails always have one object and there's only ever the url property in it
+ json.field "sponsorIconUrl", sponsor_badge_url
+ end
+
+ comment_toolbar = comment_mutation.dig("payload", "commentEntityPayload", "toolbar")
+ json.field "likeCount", short_text_to_number(comment_toolbar["likeCountNotliked"].as_s)
+ reply_count = short_text_to_number(comment_toolbar["replyCount"]?.try &.as_s || "0")
+
+ if heart_state = toolbar_mutation.dig?("payload", "engagementToolbarStateEntityPayload", "heartState")
+ if heart_state.as_s == "TOOLBAR_HEART_STATE_HEARTED"
+ json.field "creatorHeart" do
+ json.object do
+ json.field "creatorThumbnail", comment_toolbar["creatorThumbnailUrl"].as_s
+ json.field "creatorName", comment_toolbar["heartActiveTooltip"].as_s.sub("❤ by ", "")
+ end
+ end
+ end
+ end
+
+ published_text = comment_mutation.dig?("payload", "commentEntityPayload", "properties", "publishedTime").try &.as_s
+ end
+
+ json.field "isPinned", (cvm.dig?("pinnedText") != nil)
+ json.field "commentId", cvm["commentId"]
+ else
+ if node["comment"]?
+ node_comment = node["comment"]["commentRenderer"]
+ else
+ node_comment = node["commentRenderer"]
+ end
+ json.field "commentId", node_comment["commentId"]
+ html_content = node_comment["contentText"]?.try { |t| parse_content(t, id) }
+
+ json.field "verified", (node_comment["authorCommentBadge"]? != nil)
+
+ json.field "author", node_comment["authorText"]?.try &.["simpleText"]? || ""
+ json.field "authorThumbnails" do
+ json.array do
+ node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
+ json.object do
+ json.field "url", thumbnail["url"]
+ json.field "width", thumbnail["width"]
+ json.field "height", thumbnail["height"]
+ end
+ end
+ end
+ end
+
+ if comment_action_buttons_renderer = node_comment.dig?("actionButtons", "commentActionButtonsRenderer")
+ json.field "likeCount", comment_action_buttons_renderer["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"].as_s.scan(/\d/).map(&.[0]).join.to_i
+ if comment_action_buttons_renderer["creatorHeart"]?
+ heart_data = comment_action_buttons_renderer["creatorHeart"]["creatorHeartRenderer"]["creatorThumbnail"]
+ json.field "creatorHeart" do
+ json.object do
+ json.field "creatorThumbnail", heart_data["thumbnails"][-1]["url"]
+ json.field "creatorName", heart_data["accessibility"]["accessibilityData"]["label"]
+ end
+ end
+ end
+ end
+
+ if node_comment["authorEndpoint"]?
+ json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
+ json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
+ else
+ json.field "authorId", ""
+ json.field "authorUrl", ""
+ end
+
+ json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]
+ json.field "isPinned", (node_comment["pinnedCommentBadge"]? != nil)
+ published_text = node_comment["publishedTimeText"]["runs"][0]["text"].as_s
+
+ json.field "isSponsor", (node_comment["sponsorCommentBadge"]? != nil)
+ if node_comment["sponsorCommentBadge"]?
+ # Sponsor icon thumbnails always have one object and there's only ever the url property in it
+ json.field "sponsorIconUrl", node_comment.dig("sponsorCommentBadge", "sponsorCommentBadgeRenderer", "customBadge", "thumbnails", 0, "url").to_s
+ end
+
+ reply_count = node_comment["replyCount"]?
+ end
+
+ content_html = html_content || ""
+ json.field "content", html_to_content(content_html)
+ json.field "contentHtml", content_html
+
+ if published_text != nil
+ published_text = published_text.to_s
+ if published_text.includes?(" (edited)")
+ json.field "isEdited", true
+ published = decode_date(published_text.rchop(" (edited)"))
+ else
+ json.field "isEdited", false
+ published = decode_date(published_text)
+ end
+
+ json.field "published", published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
+ end
+
+ if node_replies && !response["commentRepliesContinuation"]?
+ if node_replies["continuations"]?
+ continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
+ elsif node_replies["contents"]?
+ continuation = node_replies["contents"]?.try &.as_a[0]["continuationItemRenderer"]["continuationEndpoint"]["continuationCommand"]["token"].as_s
+ end
+ continuation ||= ""
+
+ json.field "replies" do
+ json.object do
+ json.field "replyCount", reply_count || 1
+ json.field "continuation", continuation
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ if continuation_item_renderer
+ if continuation_item_renderer["continuationEndpoint"]?
+ continuation_endpoint = continuation_item_renderer["continuationEndpoint"]
+ elsif continuation_item_renderer["button"]?
+ continuation_endpoint = continuation_item_renderer["button"]["buttonRenderer"]["command"]
+ end
+ if continuation_endpoint
+ json.field "continuation", continuation_endpoint["continuationCommand"]["token"].as_s
+ end
+ end
+ end
+ end
+
+ if format == "html"
+ response = JSON.parse(response)
+ content_html = Frontend::Comments.template_youtube(response, locale, thin_mode)
+ response = JSON.build do |json|
+ json.object do
+ json.field "contentHtml", content_html
+
+ if response["commentCount"]?
+ json.field "commentCount", response["commentCount"]
+ else
+ json.field "commentCount", 0
+ end
+ end
+ end
+ end
+
+ return response
+ end
+
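+ # Builds the protobuf-based continuation token used to page through a video's
+ # comments. Illustrative usage (sketch, not part of the original patch):
+ #
+ #   token = produce_continuation("dQw4w9WgXcQ", cursor: "", sort_by: "new")
+ #   # => URL-safe, percent-encoded Base64 blob to be sent as the
+ #   #    "continuation" parameter of the next comments request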
+ def produce_continuation(video_id, cursor = "", sort_by = "top")
+ object = {
+ "2:embedded" => {
+ "2:string" => video_id,
+ "25:varint" => 0_i64,
+ "28:varint" => 1_i64,
+ "36:embedded" => {
+ "5:varint" => -1_i64,
+ "8:varint" => 0_i64,
+ },
+ "40:embedded" => {
+ "1:varint" => 4_i64,
+ "3:string" => "https://www.youtube.com",
+ "4:string" => "",
+ },
+ },
+ "3:varint" => 6_i64,
+ "6:embedded" => {
+ "1:string" => cursor,
+ "4:embedded" => {
+ "4:string" => video_id,
+ "6:varint" => 0_i64,
+ },
+ "5:varint" => 20_i64,
+ },
+ }
+
+ case sort_by
+ when "top"
+ object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
+ when "new", "newest"
+ object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 1_i64
+ else # top
+ object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
+ end
+
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return continuation
+ end
+end
diff --git a/src/invidious/config.cr b/src/invidious/config.cr
new file mode 100644
index 00000000..ff768197
--- /dev/null
+++ b/src/invidious/config.cr
@@ -0,0 +1,256 @@
+struct DBConfig
+ include YAML::Serializable
+
+ property user : String
+ property password : String
+ property host : String
+ property port : Int32
+ property dbname : String
+end
+
+struct ConfigPreferences
+ include YAML::Serializable
+
+ property annotations : Bool = false
+ property annotations_subscribed : Bool = false
+ property preload : Bool = true
+ property autoplay : Bool = false
+ property captions : Array(String) = ["", "", ""]
+ property comments : Array(String) = ["youtube", ""]
+ property continue : Bool = false
+ property continue_autoplay : Bool = true
+ property dark_mode : String = ""
+ property latest_only : Bool = false
+ property listen : Bool = false
+ property local : Bool = false
+ property locale : String = "en-US"
+ property watch_history : Bool = true
+ property max_results : Int32 = 40
+ property notifications_only : Bool = false
+ property player_style : String = "invidious"
+ property quality : String = "hd720"
+ property quality_dash : String = "auto"
+ property default_home : String? = "Popular"
+ property feed_menu : Array(String) = ["Popular", "Trending", "Subscriptions", "Playlists"]
+ property automatic_instance_redirect : Bool = false
+ property region : String = "US"
+ property related_videos : Bool = true
+ property sort : String = "published"
+ property speed : Float32 = 1.0_f32
+ property thin_mode : Bool = false
+ property unseen_only : Bool = false
+ property video_loop : Bool = false
+ property extend_desc : Bool = false
+ property volume : Int32 = 100
+ property vr_mode : Bool = true
+ property show_nick : Bool = true
+ property save_player_pos : Bool = false
+
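+ # Expands at compile time to a NamedTuple covering every preference above,
+ # roughly {annotations: annotations, autoplay: autoplay, ...} (shape sketch,
+ # not an exhaustive listing).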
+ def to_tuple
+ {% begin %}
+ {
+ {{(@type.instance_vars.map { |var| "#{var.name}: #{var.name}".id }).splat}}
+ }
+ {% end %}
+ end
+end
+
+struct HTTPProxyConfig
+ include YAML::Serializable
+
+ property user : String
+ property password : String
+ property host : String
+ property port : Int32
+end
+
+class Config
+ include YAML::Serializable
+
+ # Number of threads to use for crawling videos from channels (for updating subscriptions)
+ property channel_threads : Int32 = 1
+ # Time interval between two executions of the job that crawls channel videos (subscriptions update).
+ @[YAML::Field(converter: Preferences::TimeSpanConverter)]
+ property channel_refresh_interval : Time::Span = 30.minutes
+ # Number of threads to use for updating feeds
+ property feed_threads : Int32 = 1
+ # Log file path or STDOUT
+ property output : String = "STDOUT"
+ # Default log level. Valid YAML values are ints and strings; see src/invidious/helpers/logger.cr
+ property log_level : LogLevel = LogLevel::Info
+ # Enables colors in logs. Useful for debugging purposes
+ property colorize_logs : Bool = false
+ # Database configuration with separate parameters (username, hostname, etc)
+ property db : DBConfig? = nil
+
+ # Database configuration using 12-Factor "Database URL" syntax
+ @[YAML::Field(converter: Preferences::URIConverter)]
+ property database_url : URI = URI.parse("")
+ # Used for crawling channels: threads should check all videos uploaded by a channel
+ property full_refresh : Bool = false
+
+ # Jobs config structure. See jobs.cr and jobs/base_job.cr
+ property jobs = Invidious::Jobs::JobsConfig.new
+
+ # Used to tell Invidious it is behind a proxy, so links to resources should be https://
+ property https_only : Bool?
+ # HMAC signing key for CSRF tokens and verifying pubsub subscriptions
+ property hmac_key : String = ""
+ # Domain to be used for links to resources on the site where an absolute URL is required
+ property domain : String?
+ # Subscribe to channels using PubSubHubbub (requires domain, hmac_key)
+ property use_pubsub_feeds : Bool | Int32 = false
+ property popular_enabled : Bool = true
+ property captcha_enabled : Bool = true
+ property login_enabled : Bool = true
+ property registration_enabled : Bool = true
+ property statistics_enabled : Bool = false
+ property admins : Array(String) = [] of String
+ property external_port : Int32? = nil
+ property default_user_preferences : ConfigPreferences = ConfigPreferences.from_yaml("")
+ # For DMCA compliance: disables the download widget for the listed video IDs
+ property dmca_content : Array(String) = [] of String
+ # Check table integrity, automatically try to add any missing columns, create tables, etc.
+ property check_tables : Bool = false
+ # Cache annotations requested from IA, will not cache empty annotations or annotations that only contain cards
+ property cache_annotations : Bool = false
+ # Optional banner to be displayed along top of page for announcements, etc.
+ property banner : String? = nil
+ # Enables 'Strict-Transport-Security'. Ensure that `domain` and all subdomains are served securely
+ property hsts : Bool? = true
+ # Disable proxying server-wide: options: 'dash', 'livestreams', 'downloads', 'local'
+ property disable_proxy : Bool? | Array(String)? = false
+ # Enable user notifications for all users
+ property enable_user_notifications : Bool = true
+
+ # URL to the modified source code, for easy AGPL compliance.
+ # Will be displayed in the footer, next to the main source code link
+ property modified_source_code_url : String? = nil
+
+ # Connect to YouTube over 'ipv6' or 'ipv4'. Can sometimes help resolve rate-limiting issues (see https://github.com/ytdl-org/youtube-dl/issues/21729)
+ @[YAML::Field(converter: Preferences::FamilyConverter)]
+ property force_resolve : Socket::Family = Socket::Family::UNSPEC
+
+ # External signature solver server socket (either a path to a UNIX domain socket or "<IP>:<Port>")
+ property signature_server : String? = nil
+
+ # Port to listen for connections (overridden by command line argument)
+ property port : Int32 = 3000
+ # Host to bind (overridden by command line argument)
+ property host_binding : String = "0.0.0.0"
+ # Make Invidious listen on a UNIX socket - Example: /tmp/invidious.sock
+ property bind_unix : String? = nil
+ # Pool size for HTTP requests to youtube.com and ytimg.com (each domain has a separate pool of `pool_size`)
+ property pool_size : Int32 = 100
+ # HTTP Proxy configuration
+ property http_proxy : HTTPProxyConfig? = nil
+
+ # Use Innertube's transcripts API instead of timedtext for closed captions
+ property use_innertube_for_captions : Bool = false
+
+ # visitor data ID for Google session
+ property visitor_data : String? = nil
+ # poToken for passing bot attestation
+ property po_token : String? = nil
+
+ # Saved cookies in "name1=value1; name2=value2..." format
+ @[YAML::Field(converter: Preferences::StringToCookies)]
+ property cookies : HTTP::Cookies = HTTP::Cookies.new
+
+ # Playlist length limit
+ property playlist_length_limit : Int32 = 500
+
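+ # Illustrative example (sketch): with `disable_proxy: [dash, livestreams]`
+ # in the config file,
+ #   CONFIG.disabled?("dash")      # => true
+ #   CONFIG.disabled?("downloads") # => false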
+ def disabled?(option)
+ case disabled = CONFIG.disable_proxy
+ when Bool
+ return disabled
+ when Array
+ if disabled.includes? option
+ return true
+ else
+ return false
+ end
+ else
+ return false
+ end
+ end
+
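+ # Loads the configuration from config/config.yml (or from the file named by
+ # INVIDIOUS_CONFIG_FILE, or from raw YAML in INVIDIOUS_CONFIG), then applies
+ # any "INVIDIOUS_"-prefixed environment overrides. Illustrative overrides for
+ # the properties above (sketch):
+ #
+ #   INVIDIOUS_PORT=8080
+ #   INVIDIOUS_CHANNEL_THREADS=4
+ #   INVIDIOUS_POPULAR_ENABLED=false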
+ def self.load
+ # Load config from file or YAML string env var
+ env_config_file = "INVIDIOUS_CONFIG_FILE"
+ env_config_yaml = "INVIDIOUS_CONFIG"
+
+ config_file = ENV.has_key?(env_config_file) ? ENV.fetch(env_config_file) : "config/config.yml"
+ config_yaml = ENV.has_key?(env_config_yaml) ? ENV.fetch(env_config_yaml) : File.read(config_file)
+
+ config = Config.from_yaml(config_yaml)
+
+ # Update config from env vars (upcased and prefixed with "INVIDIOUS_")
+ {% for ivar in Config.instance_vars %}
+ {% env_id = "INVIDIOUS_#{ivar.id.upcase}" %}
+
+ if ENV.has_key?({{env_id}})
+ env_value = ENV.fetch({{env_id}})
+ success = false
+
+ # Use YAML converter if specified
+ {% ann = ivar.annotation(::YAML::Field) %}
+ {% if ann && ann[:converter] %}
+ config.{{ivar.id}} = {{ann[:converter]}}.from_yaml(YAML::ParseContext.new, YAML::Nodes.parse(ENV.fetch({{env_id}})).nodes[0])
+ success = true
+
+ # Use regular YAML parser otherwise
+ {% else %}
+ {% ivar_types = ivar.type.union? ? ivar.type.union_types : [ivar.type] %}
+ # Sort types to avoid parsing nulls and numbers as strings
+ {% ivar_types = ivar_types.sort_by { |ivar_type| ivar_type == Nil ? 0 : ivar_type == Int32 ? 1 : 2 } %}
+ {{ivar_types}}.each do |ivar_type|
+ if !success
+ begin
+ config.{{ivar.id}} = ivar_type.from_yaml(env_value)
+ success = true
+ rescue
+ # nop
+ end
+ end
+ end
+ {% end %}
+
+ # Exit on fail
+ if !success
+ puts %(Config.{{ivar.id}} failed to parse #{env_value} as {{ivar.type}})
+ exit(1)
+ end
+ end
+ {% end %}
+
+ # HMAC_key is mandatory
+ # See: https://github.com/iv-org/invidious/issues/3854
+ if config.hmac_key.empty?
+ puts "Config: 'hmac_key' is required/can't be empty"
+ exit(1)
+ elsif config.hmac_key == "CHANGE_ME!!"
+ puts "Config: The value of 'hmac_key' needs to be changed!!"
+ exit(1)
+ end
+
+ # Build database_url from db.* if it's not set directly
+ if config.database_url.to_s.empty?
+ if db = config.db
+ config.database_url = URI.new(
+ scheme: "postgres",
+ user: db.user,
+ password: db.password,
+ host: db.host,
+ port: db.port,
+ path: db.dbname,
+ )
+ else
+ puts "Config: Either database_url or db.* is required"
+ exit(1)
+ end
+ end
+
+ return config
+ end
+end
diff --git a/src/invidious/database/annotations.cr b/src/invidious/database/annotations.cr
new file mode 100644
index 00000000..03749473
--- /dev/null
+++ b/src/invidious/database/annotations.cr
@@ -0,0 +1,24 @@
+require "./base.cr"
+
+module Invidious::Database::Annotations
+ extend self
+
+ def insert(id : String, annotations : String)
+ request = <<-SQL
+ INSERT INTO annotations
+ VALUES ($1, $2)
+ ON CONFLICT DO NOTHING
+ SQL
+
+ PG_DB.exec(request, id, annotations)
+ end
+
+ def select(id : String) : Annotation?
+ request = <<-SQL
+ SELECT * FROM annotations
+ WHERE id = $1
+ SQL
+
+ return PG_DB.query_one?(request, id, as: Annotation)
+ end
+end
diff --git a/src/invidious/database/base.cr b/src/invidious/database/base.cr
new file mode 100644
index 00000000..0fb1b6af
--- /dev/null
+++ b/src/invidious/database/base.cr
@@ -0,0 +1,136 @@
+require "pg"
+
+module Invidious::Database
+ extend self
+
+ # Checks table integrity
+ #
+ # Note: config is passed as a parameter to avoid complex
+ # dependencies between different parts of the software.
+ def check_integrity(cfg)
+ return if !cfg.check_tables
+ Invidious::Database.check_enum("privacy", PlaylistPrivacy)
+
+ Invidious::Database.check_table("channels", InvidiousChannel)
+ Invidious::Database.check_table("channel_videos", ChannelVideo)
+ Invidious::Database.check_table("playlists", InvidiousPlaylist)
+ Invidious::Database.check_table("playlist_videos", PlaylistVideo)
+ Invidious::Database.check_table("nonces", Nonce)
+ Invidious::Database.check_table("session_ids", SessionId)
+ Invidious::Database.check_table("users", User)
+ Invidious::Database.check_table("videos", Video)
+
+ if cfg.cache_annotations
+ Invidious::Database.check_table("annotations", Annotation)
+ end
+ end
+
+ #
+ # Table/enum integrity checks
+ #
+
+ def check_enum(enum_name, struct_type = nil)
+ return # TODO
+
+ if !PG_DB.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
+ LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
+
+ PG_DB.using_connection do |conn|
+ conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
+ end
+ end
+ end
+
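+ # Ensures the table exists and that its columns match the order expected by
+ # the given struct: missing columns are added from config/sql/<table>.sql,
+ # misplaced columns are rotated into position via a temporary "<name>_new"
+ # column, and unexpected columns are dropped.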
+ def check_table(table_name, struct_type = nil)
+ # Create table if it doesn't exist
+ begin
+ PG_DB.exec("SELECT * FROM #{table_name} LIMIT 0")
+ rescue ex
+ LOGGER.info("check_table: CREATE TABLE #{table_name}")
+
+ PG_DB.using_connection do |conn|
+ conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
+ end
+ end
+
+ return if !struct_type
+
+ struct_array = struct_type.type_array
+ column_array = get_column_array(PG_DB, table_name)
+ column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
+ .try &.["types"].split(",").map(&.strip).reject &.starts_with?("CONSTRAINT")
+
+ return if !column_types
+
+ struct_array.each_with_index do |name, i|
+ if name != column_array[i]?
+ if !column_array[i]?
+ new_column = column_types.select(&.starts_with?(name))[0]
+ LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ next
+ end
+
+ # Column doesn't exist
+ if !column_array.includes? name
+ new_column = column_types.select(&.starts_with?(name))[0]
+ PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ end
+
+ # Column exists but in the wrong position, rotate
+ if struct_array.includes? column_array[i]
+ until name == column_array[i]
+ new_column = column_types.select(&.starts_with?(column_array[i]))[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
+
+ # There's a column we didn't expect
+ if !new_column
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+ PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+
+ column_array = get_column_array(PG_DB, table_name)
+ next
+ end
+
+ LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ PG_DB.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+
+ LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+ PG_DB.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+
+ LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+ PG_DB.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+
+ column_array = get_column_array(PG_DB, table_name)
+ end
+ else
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ end
+ end
+ end
+
+ return if column_array.size <= struct_array.size
+
+ column_array.each do |column|
+ if !struct_array.includes? column
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+ PG_DB.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+ end
+ end
+ end
+
+ def get_column_array(db, table_name)
+ column_array = [] of String
+ PG_DB.query("SELECT * FROM #{table_name} LIMIT 0") do |rs|
+ rs.column_count.times do |i|
+ column = rs.as(PG::ResultSet).field(i)
+ column_array << column.name
+ end
+ end
+
+ return column_array
+ end
+end
diff --git a/src/invidious/database/channels.cr b/src/invidious/database/channels.cr
new file mode 100644
index 00000000..df44e485
--- /dev/null
+++ b/src/invidious/database/channels.cr
@@ -0,0 +1,158 @@
+require "./base.cr"
+
+#
+# This module contains functions related to the "channels" table.
+#
+module Invidious::Database::Channels
+ extend self
+
+ # -------------------
+ # Insert / delete
+ # -------------------
+
+ def insert(channel : InvidiousChannel, update_on_conflict : Bool = false)
+ channel_array = channel.to_a
+
+ request = <<-SQL
+ INSERT INTO channels
+ VALUES (#{arg_array(channel_array)})
+ SQL
+
+ if update_on_conflict
+ request += <<-SQL
+ ON CONFLICT (id) DO UPDATE
+ SET author = $2, updated = $3
+ SQL
+ end
+
+ PG_DB.exec(request, args: channel_array)
+ end
+
+ # -------------------
+ # Update
+ # -------------------
+
+ def update_author(id : String, author : String)
+ request = <<-SQL
+ UPDATE channels
+ SET updated = now(), author = $1, deleted = false
+ WHERE id = $2
+ SQL
+
+ PG_DB.exec(request, author, id)
+ end
+
+ def update_subscription_time(id : String)
+ request = <<-SQL
+ UPDATE channels
+ SET subscribed = now()
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, id)
+ end
+
+ def update_mark_deleted(id : String)
+ request = <<-SQL
+ UPDATE channels
+ SET updated = now(), deleted = true
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, id)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select(id : String) : InvidiousChannel?
+ request = <<-SQL
+ SELECT * FROM channels
+ WHERE id = $1
+ SQL
+
+ return PG_DB.query_one?(request, id, as: InvidiousChannel)
+ end
+
+ def select(ids : Array(String)) : Array(InvidiousChannel)?
+ return [] of InvidiousChannel if ids.empty?
+
+ request = <<-SQL
+ SELECT * FROM channels
+ WHERE id = ANY($1)
+ SQL
+
+ return PG_DB.query_all(request, ids, as: InvidiousChannel)
+ end
+end
+
+#
+# This module contains functions related to the "channel_videos" table.
+#
+module Invidious::Database::ChannelVideos
+ extend self
+
+ # -------------------
+ # Insert
+ # -------------------
+
+ # Returns true if a new row was inserted, false if an existing row was updated
+ def insert(video : ChannelVideo, with_premiere_timestamp : Bool = false) : Bool
+ if with_premiere_timestamp
+ last_items = "premiere_timestamp = $9, views = $10"
+ else
+ last_items = "views = $10"
+ end
+
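+ # "xmax = 0" only holds for rows created by this statement, so was_insert
+ # reports whether the upsert performed an INSERT rather than an UPDATE.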
+ request = <<-SQL
+ INSERT INTO channel_videos
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
+ ON CONFLICT (id) DO UPDATE
+ SET title = $2, published = $3, updated = $4, ucid = $5,
+ author = $6, length_seconds = $7, live_now = $8, #{last_items}
+ RETURNING (xmax=0) AS was_insert
+ SQL
+
+ return PG_DB.query_one(request, *video.to_tuple, as: Bool)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select(ids : Array(String)) : Array(ChannelVideo)
+ return [] of ChannelVideo if ids.empty?
+
+ request = <<-SQL
+ SELECT * FROM channel_videos
+ WHERE id = ANY($1)
+ ORDER BY published DESC
+ SQL
+
+ return PG_DB.query_all(request, ids, as: ChannelVideo)
+ end
+
+ def select_notfications(ucid : String, since : Time) : Array(ChannelVideo)
+ request = <<-SQL
+ SELECT * FROM channel_videos
+ WHERE ucid = $1 AND published > $2
+ ORDER BY published DESC
+ LIMIT 15
+ SQL
+
+ return PG_DB.query_all(request, ucid, since, as: ChannelVideo)
+ end
+
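+ # Returns the latest video of each of the 40 most-subscribed channels
+ # (one row per channel, thanks to DISTINCT ON (ucid)).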
+ def select_popular_videos : Array(ChannelVideo)
+ request = <<-SQL
+ SELECT DISTINCT ON (ucid) *
+ FROM channel_videos
+ WHERE ucid IN (SELECT channel FROM (SELECT UNNEST(subscriptions) AS channel FROM users) AS d
+ GROUP BY channel ORDER BY COUNT(channel) DESC LIMIT 40)
+ ORDER BY ucid, published DESC
+ SQL
+
+ PG_DB.query_all(request, as: ChannelVideo)
+ end
+end
diff --git a/src/invidious/database/migration.cr b/src/invidious/database/migration.cr
new file mode 100644
index 00000000..921d8f38
--- /dev/null
+++ b/src/invidious/database/migration.cr
@@ -0,0 +1,38 @@
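+# A migration declares its version and implements #up, which runs inside a
+# transaction. Minimal sketch (illustrative names, not part of the patch):
+#
+#   class AddFooColumn < Invidious::Database::Migration
+#     version 42
+#
+#     def up(conn : DB::Connection)
+#       conn.exec "ALTER TABLE public.channels ADD COLUMN foo text"
+#     end
+#   end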
+abstract class Invidious::Database::Migration
+ macro inherited
+ Migrator.migrations << self
+ end
+
+ @@version : Int64?
+
+ def self.version(version : Int32 | Int64)
+ @@version = version.to_i64
+ end
+
+ getter? completed = false
+
+ def initialize(@db : DB::Database)
+ end
+
+ abstract def up(conn : DB::Connection)
+
+ def migrate
+ # The migrator already ignores completed migrations,
+ # but this is an extra check to make sure a migration doesn't run twice
+ return if completed?
+
+ @db.transaction do |txn|
+ up(txn.connection)
+ track(txn.connection)
+ @completed = true
+ end
+ end
+
+ def version : Int64
+ @@version.not_nil!
+ end
+
+ private def track(conn : DB::Connection)
+ conn.exec("INSERT INTO #{Migrator::MIGRATIONS_TABLE} (version) VALUES ($1)", version)
+ end
+end
diff --git a/src/invidious/database/migrations/0001_create_channels_table.cr b/src/invidious/database/migrations/0001_create_channels_table.cr
new file mode 100644
index 00000000..a1362bcf
--- /dev/null
+++ b/src/invidious/database/migrations/0001_create_channels_table.cr
@@ -0,0 +1,30 @@
+module Invidious::Database::Migrations
+ class CreateChannelsTable < Migration
+ version 1
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.channels
+ (
+ id text NOT NULL,
+ author text,
+ updated timestamp with time zone,
+ deleted boolean,
+ subscribed timestamp with time zone,
+ CONSTRAINT channels_id_key UNIQUE (id)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.channels TO current_user;
+ SQL
+
+ conn.exec <<-SQL
+ CREATE INDEX IF NOT EXISTS channels_id_idx
+ ON public.channels
+ USING btree
+ (id COLLATE pg_catalog."default");
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0002_create_videos_table.cr b/src/invidious/database/migrations/0002_create_videos_table.cr
new file mode 100644
index 00000000..c2ac84f8
--- /dev/null
+++ b/src/invidious/database/migrations/0002_create_videos_table.cr
@@ -0,0 +1,28 @@
+module Invidious::Database::Migrations
+ class CreateVideosTable < Migration
+ version 2
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE UNLOGGED TABLE IF NOT EXISTS public.videos
+ (
+ id text NOT NULL,
+ info text,
+ updated timestamp with time zone,
+ CONSTRAINT videos_pkey PRIMARY KEY (id)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.videos TO current_user;
+ SQL
+
+ conn.exec <<-SQL
+ CREATE UNIQUE INDEX IF NOT EXISTS id_idx
+ ON public.videos
+ USING btree
+ (id COLLATE pg_catalog."default");
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0003_create_channel_videos_table.cr b/src/invidious/database/migrations/0003_create_channel_videos_table.cr
new file mode 100644
index 00000000..c9b62e4c
--- /dev/null
+++ b/src/invidious/database/migrations/0003_create_channel_videos_table.cr
@@ -0,0 +1,35 @@
+module Invidious::Database::Migrations
+ class CreateChannelVideosTable < Migration
+ version 3
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.channel_videos
+ (
+ id text NOT NULL,
+ title text,
+ published timestamp with time zone,
+ updated timestamp with time zone,
+ ucid text,
+ author text,
+ length_seconds integer,
+ live_now boolean,
+ premiere_timestamp timestamp with time zone,
+ views bigint,
+ CONSTRAINT channel_videos_id_key UNIQUE (id)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.channel_videos TO current_user;
+ SQL
+
+ conn.exec <<-SQL
+ CREATE INDEX IF NOT EXISTS channel_videos_ucid_idx
+ ON public.channel_videos
+ USING btree
+ (ucid COLLATE pg_catalog."default");
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0004_create_users_table.cr b/src/invidious/database/migrations/0004_create_users_table.cr
new file mode 100644
index 00000000..a13ba15f
--- /dev/null
+++ b/src/invidious/database/migrations/0004_create_users_table.cr
@@ -0,0 +1,34 @@
+module Invidious::Database::Migrations
+ class CreateUsersTable < Migration
+ version 4
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.users
+ (
+ updated timestamp with time zone,
+ notifications text[],
+ subscriptions text[],
+ email text NOT NULL,
+ preferences text,
+ password text,
+ token text,
+ watched text[],
+ feed_needs_update boolean,
+ CONSTRAINT users_email_key UNIQUE (email)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.users TO current_user;
+ SQL
+
+ conn.exec <<-SQL
+ CREATE UNIQUE INDEX IF NOT EXISTS email_unique_idx
+ ON public.users
+ USING btree
+ (lower(email) COLLATE pg_catalog."default");
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0005_create_session_ids_table.cr b/src/invidious/database/migrations/0005_create_session_ids_table.cr
new file mode 100644
index 00000000..13c2228d
--- /dev/null
+++ b/src/invidious/database/migrations/0005_create_session_ids_table.cr
@@ -0,0 +1,28 @@
+module Invidious::Database::Migrations
+ class CreateSessionIdsTable < Migration
+ version 5
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.session_ids
+ (
+ id text NOT NULL,
+ email text,
+ issued timestamp with time zone,
+ CONSTRAINT session_ids_pkey PRIMARY KEY (id)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.session_ids TO current_user;
+ SQL
+
+ conn.exec <<-SQL
+ CREATE INDEX IF NOT EXISTS session_ids_id_idx
+ ON public.session_ids
+ USING btree
+ (id COLLATE pg_catalog."default");
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0006_create_nonces_table.cr b/src/invidious/database/migrations/0006_create_nonces_table.cr
new file mode 100644
index 00000000..cf1229e1
--- /dev/null
+++ b/src/invidious/database/migrations/0006_create_nonces_table.cr
@@ -0,0 +1,27 @@
+module Invidious::Database::Migrations
+ class CreateNoncesTable < Migration
+ version 6
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.nonces
+ (
+ nonce text,
+ expire timestamp with time zone,
+ CONSTRAINT nonces_id_key UNIQUE (nonce)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.nonces TO current_user;
+ SQL
+
+ conn.exec <<-SQL
+ CREATE INDEX IF NOT EXISTS nonces_nonce_idx
+ ON public.nonces
+ USING btree
+ (nonce COLLATE pg_catalog."default");
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0007_create_annotations_table.cr b/src/invidious/database/migrations/0007_create_annotations_table.cr
new file mode 100644
index 00000000..dcecbc3b
--- /dev/null
+++ b/src/invidious/database/migrations/0007_create_annotations_table.cr
@@ -0,0 +1,20 @@
+module Invidious::Database::Migrations
+ class CreateAnnotationsTable < Migration
+ version 7
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.annotations
+ (
+ id text NOT NULL,
+ annotations xml,
+ CONSTRAINT annotations_id_key UNIQUE (id)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.annotations TO current_user;
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0008_create_playlists_table.cr b/src/invidious/database/migrations/0008_create_playlists_table.cr
new file mode 100644
index 00000000..6aa16e1a
--- /dev/null
+++ b/src/invidious/database/migrations/0008_create_playlists_table.cr
@@ -0,0 +1,50 @@
+module Invidious::Database::Migrations
+ class CreatePlaylistsTable < Migration
+ version 8
+
+ def up(conn : DB::Connection)
+ if !privacy_type_exists?(conn)
+ conn.exec <<-SQL
+ CREATE TYPE public.privacy AS ENUM
+ (
+ 'Public',
+ 'Unlisted',
+ 'Private'
+ );
+ SQL
+ end
+
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.playlists
+ (
+ title text,
+ id text primary key,
+ author text,
+ description text,
+ video_count integer,
+ created timestamptz,
+ updated timestamptz,
+ privacy privacy,
+ index int8[]
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON public.playlists TO current_user;
+ SQL
+ end
+
+ private def privacy_type_exists?(conn : DB::Connection) : Bool
+ request = <<-SQL
+ SELECT 1 AS one
+ FROM pg_type
+ INNER JOIN pg_namespace ON pg_namespace.oid = pg_type.typnamespace
+ WHERE pg_namespace.nspname = 'public'
+ AND pg_type.typname = 'privacy'
+ LIMIT 1;
+ SQL
+
+ !conn.query_one?(request, as: Int32).nil?
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0009_create_playlist_videos_table.cr b/src/invidious/database/migrations/0009_create_playlist_videos_table.cr
new file mode 100644
index 00000000..84938b9b
--- /dev/null
+++ b/src/invidious/database/migrations/0009_create_playlist_videos_table.cr
@@ -0,0 +1,27 @@
+module Invidious::Database::Migrations
+ class CreatePlaylistVideosTable < Migration
+ version 9
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS public.playlist_videos
+ (
+ title text,
+ id text,
+ author text,
+ ucid text,
+ length_seconds integer,
+ published timestamptz,
+ plid text references playlists(id),
+ index int8,
+ live_now boolean,
+ PRIMARY KEY (index,plid)
+ );
+ SQL
+
+ conn.exec <<-SQL
+ GRANT ALL ON TABLE public.playlist_videos TO current_user;
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrations/0010_make_videos_unlogged.cr b/src/invidious/database/migrations/0010_make_videos_unlogged.cr
new file mode 100644
index 00000000..f5d19683
--- /dev/null
+++ b/src/invidious/database/migrations/0010_make_videos_unlogged.cr
@@ -0,0 +1,11 @@
+module Invidious::Database::Migrations
+ class MakeVideosUnlogged < Migration
+ version 10
+
+ def up(conn : DB::Connection)
+ conn.exec <<-SQL
+ ALTER TABLE public.videos SET UNLOGGED;
+ SQL
+ end
+ end
+end
diff --git a/src/invidious/database/migrator.cr b/src/invidious/database/migrator.cr
new file mode 100644
index 00000000..660c3203
--- /dev/null
+++ b/src/invidious/database/migrator.cr
@@ -0,0 +1,49 @@
+class Invidious::Database::Migrator
+ MIGRATIONS_TABLE = "public.invidious_migrations"
+
+ class_getter migrations = [] of Invidious::Database::Migration.class
+
+ def initialize(@db : DB::Database)
+ end
+
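+ # Runs every registered migration that has not been applied yet.
+ # Typical invocation (sketch): Invidious::Database::Migrator.new(PG_DB).migrate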
+ def migrate
+ versions = load_versions
+
+ ran_migration = false
+ load_migrations.sort_by(&.version)
+ .each do |migration|
+ next if versions.includes?(migration.version)
+
+ puts "Running migration: #{migration.class.name}"
+ migration.migrate
+ ran_migration = true
+ end
+
+ puts "No migrations to run." unless ran_migration
+ end
+
+ def pending_migrations? : Bool
+ versions = load_versions
+
+ load_migrations.sort_by(&.version)
+ .any? { |migration| !versions.includes?(migration.version) }
+ end
+
+ private def load_migrations : Array(Invidious::Database::Migration)
+ self.class.migrations.map(&.new(@db))
+ end
+
+ private def load_versions : Array(Int64)
+ create_migrations_table
+ @db.query_all("SELECT version FROM #{MIGRATIONS_TABLE}", as: Int64)
+ end
+
+ private def create_migrations_table
+ @db.exec <<-SQL
+ CREATE TABLE IF NOT EXISTS #{MIGRATIONS_TABLE} (
+ id bigserial PRIMARY KEY,
+ version bigint NOT NULL
+ )
+ SQL
+ end
+end
diff --git a/src/invidious/database/nonces.cr b/src/invidious/database/nonces.cr
new file mode 100644
index 00000000..b87c81ec
--- /dev/null
+++ b/src/invidious/database/nonces.cr
@@ -0,0 +1,55 @@
+require "./base.cr"
+
+module Invidious::Database::Nonces
+ extend self
+
+ # -------------------
+ # Insert / Delete
+ # -------------------
+
+ def insert(nonce : String, expire : Time)
+ request = <<-SQL
+ INSERT INTO nonces
+ VALUES ($1, $2)
+ ON CONFLICT DO NOTHING
+ SQL
+
+ PG_DB.exec(request, nonce, expire)
+ end
+
+ def delete_expired
+ request = <<-SQL
+ DELETE FROM nonces *
+ WHERE expire < now()
+ SQL
+
+ PG_DB.exec(request)
+ end
+
+ # -------------------
+ # Update
+ # -------------------
+
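+ # Invalidates a nonce by backdating its expiry; backdated rows are later
+ # removed by delete_expired.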
+ def update_set_expired(nonce : String)
+ request = <<-SQL
+ UPDATE nonces
+ SET expire = $1
+ WHERE nonce = $2
+ SQL
+
+ PG_DB.exec(request, Time.utc(1990, 1, 1), nonce)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select(nonce : String) : Tuple(String, Time)?
+ request = <<-SQL
+ SELECT * FROM nonces
+ WHERE nonce = $1
+ SQL
+
+ return PG_DB.query_one?(request, nonce, as: {String, Time})
+ end
+end
diff --git a/src/invidious/database/playlists.cr b/src/invidious/database/playlists.cr
new file mode 100644
index 00000000..08aa719a
--- /dev/null
+++ b/src/invidious/database/playlists.cr
@@ -0,0 +1,262 @@
+require "./base.cr"
+
+#
+# This module contains functions related to the "playlists" table.
+#
+module Invidious::Database::Playlists
+ extend self
+
+ # -------------------
+ # Insert / delete
+ # -------------------
+
+ def insert(playlist : InvidiousPlaylist)
+ playlist_array = playlist.to_a
+
+ request = <<-SQL
+ INSERT INTO playlists
+ VALUES (#{arg_array(playlist_array)})
+ SQL
+
+ PG_DB.exec(request, args: playlist_array)
+ end
+
+ # Deletes the given playlist and its associated playlist videos
+ def delete(id : String)
+ PlaylistVideos.delete_by_playlist(id)
+ request = <<-SQL
+ DELETE FROM playlists *
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, id)
+ end
+
+ # -------------------
+ # Update
+ # -------------------
+
+ def update(id : String, title : String, privacy, description, updated)
+ request = <<-SQL
+ UPDATE playlists
+ SET title = $1, privacy = $2, description = $3, updated = $4
+ WHERE id = $5
+ SQL
+
+ PG_DB.exec(request, title, privacy, description, updated, id)
+ end
+
+ def update_description(id : String, description)
+ request = <<-SQL
+ UPDATE playlists
+ SET description = $1
+ WHERE id = $2
+ SQL
+
+ PG_DB.exec(request, description, id)
+ end
+
+ def update_subscription_time(id : String)
+ request = <<-SQL
+ UPDATE playlists
+ SET subscribed = now()
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, id)
+ end
+
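+ # The playlist row keeps the per-video index values in the "index" array;
+ # adding or removing a video appends/removes its index and keeps video_count
+ # in sync with the array's cardinality.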
+ def update_video_added(id : String, index : String | Int64)
+ request = <<-SQL
+ UPDATE playlists
+ SET index = array_append(index, $1),
+ video_count = cardinality(index) + 1,
+ updated = now()
+ WHERE id = $2
+ SQL
+
+ PG_DB.exec(request, index, id)
+ end
+
+ def update_video_removed(id : String, index : String | Int64)
+ request = <<-SQL
+ UPDATE playlists
+ SET index = array_remove(index, $1),
+ video_count = cardinality(index) - 1,
+ updated = now()
+ WHERE id = $2
+ SQL
+
+ PG_DB.exec(request, index, id)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select(*, id : String) : InvidiousPlaylist?
+ request = <<-SQL
+ SELECT * FROM playlists
+ WHERE id = $1
+ SQL
+
+ return PG_DB.query_one?(request, id, as: InvidiousPlaylist)
+ end
+
+ def select_all(*, author : String) : Array(InvidiousPlaylist)
+ request = <<-SQL
+ SELECT * FROM playlists
+ WHERE author = $1
+ SQL
+
+ return PG_DB.query_all(request, author, as: InvidiousPlaylist)
+ end
+
+ # -------------------
+ # Select (filtered)
+ # -------------------
+
+ def select_like_iv(email : String) : Array(InvidiousPlaylist)
+ request = <<-SQL
+ SELECT * FROM playlists
+ WHERE author = $1 AND id LIKE 'IV%'
+ ORDER BY created
+ SQL
+
+ PG_DB.query_all(request, email, as: InvidiousPlaylist)
+ end
+
+ def select_not_like_iv(email : String) : Array(InvidiousPlaylist)
+ request = <<-SQL
+ SELECT * FROM playlists
+ WHERE author = $1 AND id NOT LIKE 'IV%'
+ ORDER BY created
+ SQL
+
+ PG_DB.query_all(request, email, as: InvidiousPlaylist)
+ end
+
+ def select_user_created_playlists(email : String) : Array({String, String})
+ request = <<-SQL
+ SELECT id,title FROM playlists
+ WHERE author = $1 AND id LIKE 'IV%'
+ ORDER BY title
+ SQL
+
+ PG_DB.query_all(request, email, as: {String, String})
+ end
+
+ # -------------------
+ # Misc checks
+ # -------------------
+
+ # Check if given playlist ID exists
+ def exists?(id : String) : Bool
+ request = <<-SQL
+ SELECT id FROM playlists
+ WHERE id = $1
+ SQL
+
+ return PG_DB.query_one?(request, id, as: String).nil?
+ end
+
+ # Count how many playlists a user has created.
+ def count_owned_by(author : String) : Int64
+ request = <<-SQL
+ SELECT count(*) FROM playlists
+ WHERE author = $1
+ SQL
+
+ return PG_DB.query_one?(request, author, as: Int64) || 0_i64
+ end
+end
+
+#
+# This module contains functions related to the "playlist_videos" table.
+#
+module Invidious::Database::PlaylistVideos
+ extend self
+
+ private alias VideoIndex = Int64 | Array(Int64)
+
+ # -------------------
+ # Insert / Delete
+ # -------------------
+
+ def insert(video : PlaylistVideo)
+ video_array = video.to_a
+
+ request = <<-SQL
+ INSERT INTO playlist_videos
+ VALUES (#{arg_array(video_array)})
+ SQL
+
+ PG_DB.exec(request, args: video_array)
+ end
+
+ def delete(index)
+ request = <<-SQL
+ DELETE FROM playlist_videos *
+ WHERE index = $1
+ SQL
+
+ PG_DB.exec(request, index)
+ end
+
+ def delete_by_playlist(plid : String)
+ request = <<-SQL
+ DELETE FROM playlist_videos *
+ WHERE plid = $1
+ SQL
+
+ PG_DB.exec(request, plid)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select(plid : String, index : VideoIndex, offset, limit = 100) : Array(PlaylistVideo)
+ request = <<-SQL
+ SELECT * FROM playlist_videos
+ WHERE plid = $1
+ ORDER BY array_position($2, index)
+ LIMIT $3
+ OFFSET $4
+ SQL
+
+ return PG_DB.query_all(request, plid, index, limit, offset, as: PlaylistVideo)
+ end
+
+ def select_index(plid : String, vid : String) : Int64?
+ request = <<-SQL
+ SELECT index FROM playlist_videos
+ WHERE plid = $1 AND id = $2
+ LIMIT 1
+ SQL
+
+ return PG_DB.query_one?(request, plid, vid, as: Int64)
+ end
+
+ def select_one_id(plid : String, index : VideoIndex) : String?
+ request = <<-SQL
+ SELECT id FROM playlist_videos
+ WHERE plid = $1
+ ORDER BY array_position($2, index)
+ LIMIT 1
+ SQL
+
+ return PG_DB.query_one?(request, plid, index, as: String)
+ end
+
+ def select_ids(plid : String, index : VideoIndex, limit = 500) : Array(String)
+ request = <<-SQL
+ SELECT id FROM playlist_videos
+ WHERE plid = $1
+ ORDER BY array_position($2, index)
+ LIMIT $3
+ SQL
+
+ return PG_DB.query_all(request, plid, index, limit, as: String)
+ end
+end
diff --git a/src/invidious/database/sessions.cr b/src/invidious/database/sessions.cr
new file mode 100644
index 00000000..96587082
--- /dev/null
+++ b/src/invidious/database/sessions.cr
@@ -0,0 +1,74 @@
+require "./base.cr"
+
+module Invidious::Database::SessionIDs
+ extend self
+
+ # -------------------
+ # Insert
+ # -------------------
+
+ def insert(sid : String, email : String, handle_conflicts : Bool = false)
+ request = <<-SQL
+ INSERT INTO session_ids
+ VALUES ($1, $2, now())
+ SQL
+
+ request += " ON CONFLICT (id) DO NOTHING" if handle_conflicts
+
+ PG_DB.exec(request, sid, email)
+ end
+
+ # -------------------
+ # Delete
+ # -------------------
+
+ def delete(*, sid : String)
+ request = <<-SQL
+ DELETE FROM session_ids *
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, sid)
+ end
+
+ def delete(*, email : String)
+ request = <<-SQL
+ DELETE FROM session_ids *
+ WHERE email = $1
+ SQL
+
+ PG_DB.exec(request, email)
+ end
+
+ def delete(*, sid : String, email : String)
+ request = <<-SQL
+ DELETE FROM session_ids *
+ WHERE id = $1 AND email = $2
+ SQL
+
+ PG_DB.exec(request, sid, email)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select_email(sid : String) : String?
+ request = <<-SQL
+ SELECT email FROM session_ids
+ WHERE id = $1
+ SQL
+
+ PG_DB.query_one?(request, sid, as: String)
+ end
+
+ def select_all(email : String) : Array({session: String, issued: Time})
+ request = <<-SQL
+ SELECT id, issued FROM session_ids
+ WHERE email = $1
+ ORDER BY issued DESC
+ SQL
+
+ PG_DB.query_all(request, email, as: {session: String, issued: Time})
+ end
+end
diff --git a/src/invidious/database/statistics.cr b/src/invidious/database/statistics.cr
new file mode 100644
index 00000000..9e4963fd
--- /dev/null
+++ b/src/invidious/database/statistics.cr
@@ -0,0 +1,49 @@
+require "./base.cr"
+
+module Invidious::Database::Statistics
+ extend self
+
+ # -------------------
+ # User stats
+ # -------------------
+
+ def count_users_total : Int64
+ request = <<-SQL
+ SELECT count(*) FROM users
+ SQL
+
+ PG_DB.query_one(request, as: Int64)
+ end
+
+ def count_users_active_6m : Int64
+ request = <<-SQL
+ SELECT count(*) FROM users
+ WHERE CURRENT_TIMESTAMP - updated < '6 months'
+ SQL
+
+ PG_DB.query_one(request, as: Int64)
+ end
+
+ def count_users_active_1m : Int64
+ request = <<-SQL
+ SELECT count(*) FROM users
+ WHERE CURRENT_TIMESTAMP - updated < '1 month'
+ SQL
+
+ PG_DB.query_one(request, as: Int64)
+ end
+
+ # -------------------
+ # Channel stats
+ # -------------------
+
+ def channel_last_update : Time?
+ request = <<-SQL
+ SELECT updated FROM channels
+ ORDER BY updated DESC
+ LIMIT 1
+ SQL
+
+ PG_DB.query_one?(request, as: Time)
+ end
+end
diff --git a/src/invidious/database/users.cr b/src/invidious/database/users.cr
new file mode 100644
index 00000000..d54e6a76
--- /dev/null
+++ b/src/invidious/database/users.cr
@@ -0,0 +1,228 @@
+require "./base.cr"
+
+module Invidious::Database::Users
+ extend self
+
+ # -------------------
+ # Insert / delete
+ # -------------------
+
+ def insert(user : User, update_on_conflict : Bool = false)
+ user_array = user.to_a
+ user_array[4] = user_array[4].to_json # User preferences
+
+ request = <<-SQL
+ INSERT INTO users
+ VALUES (#{arg_array(user_array)})
+ SQL
+
+ if update_on_conflict
+ request += <<-SQL
+ ON CONFLICT (email) DO UPDATE
+ SET updated = $1, subscriptions = $3
+ SQL
+ end
+
+ PG_DB.exec(request, args: user_array)
+ end
+
+ def delete(user : User)
+ request = <<-SQL
+ DELETE FROM users *
+ WHERE email = $1
+ SQL
+
+ PG_DB.exec(request, user.email)
+ end
+
+ # -------------------
+ # Update (history)
+ # -------------------
+
+ def update_watch_history(user : User)
+ request = <<-SQL
+ UPDATE users
+ SET watched = $1
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, user.watched, user.email)
+ end
+
+ def mark_watched(user : User, vid : String)
+ request = <<-SQL
+ UPDATE users
+ SET watched = array_append(array_remove(watched, $1), $1)
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, vid, user.email)
+ end
+
+ def mark_unwatched(user : User, vid : String)
+ request = <<-SQL
+ UPDATE users
+ SET watched = array_remove(watched, $1)
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, vid, user.email)
+ end
+
+ def clear_watch_history(user : User)
+ request = <<-SQL
+ UPDATE users
+ SET watched = '{}'
+ WHERE email = $1
+ SQL
+
+ PG_DB.exec(request, user.email)
+ end
+
+ # -------------------
+ # Update (channels)
+ # -------------------
+
+ def update_subscriptions(user : User)
+ request = <<-SQL
+ UPDATE users
+ SET feed_needs_update = true, subscriptions = $1
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, user.subscriptions, user.email)
+ end
+
+ def subscribe_channel(user : User, ucid : String)
+ request = <<-SQL
+ UPDATE users
+ SET feed_needs_update = true,
+ subscriptions = array_append(subscriptions,$1)
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, ucid, user.email)
+ end
+
+ def unsubscribe_channel(user : User, ucid : String)
+ request = <<-SQL
+ UPDATE users
+ SET feed_needs_update = true,
+ subscriptions = array_remove(subscriptions, $1)
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, ucid, user.email)
+ end
+
+ # -------------------
+ # Update (notifs)
+ # -------------------
+
+ def add_notification(video : ChannelVideo)
+ request = <<-SQL
+ UPDATE users
+ SET notifications = array_append(notifications, $1),
+ feed_needs_update = true
+ WHERE $2 = ANY(subscriptions)
+ SQL
+
+ PG_DB.exec(request, video.id, video.ucid)
+ end
+
+ def remove_notification(user : User, vid : String)
+ request = <<-SQL
+ UPDATE users
+ SET notifications = array_remove(notifications, $1)
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, vid, user.email)
+ end
+
+ def clear_notifications(user : User)
+ request = <<-SQL
+ UPDATE users
+ SET notifications = '{}', updated = now()
+ WHERE email = $1
+ SQL
+
+ PG_DB.exec(request, user.email)
+ end
+
+ # -------------------
+ # Update (misc)
+ # -------------------
+
+ def feed_needs_update(video : ChannelVideo)
+ request = <<-SQL
+ UPDATE users
+ SET feed_needs_update = true
+ WHERE $1 = ANY(subscriptions)
+ SQL
+
+ PG_DB.exec(request, video.ucid)
+ end
+
+ def update_preferences(user : User)
+ request = <<-SQL
+ UPDATE users
+ SET preferences = $1
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, user.preferences.to_json, user.email)
+ end
+
+ def update_password(user : User, pass : String)
+ request = <<-SQL
+ UPDATE users
+ SET password = $1
+ WHERE email = $2
+ SQL
+
+ PG_DB.exec(request, pass, user.email)
+ end
+
+ # -------------------
+ # Select
+ # -------------------
+
+ def select(*, email : String) : User?
+ request = <<-SQL
+ SELECT * FROM users
+ WHERE email = $1
+ SQL
+
+ return PG_DB.query_one?(request, email, as: User)
+ end
+
+ # Same as select, but can raise an exception
+ def select!(*, email : String) : User
+ request = <<-SQL
+ SELECT * FROM users
+ WHERE email = $1
+ SQL
+
+ return PG_DB.query_one(request, email, as: User)
+ end
+
+ def select(*, token : String) : User?
+ request = <<-SQL
+ SELECT * FROM users
+ WHERE token = $1
+ SQL
+
+ return PG_DB.query_one?(request, token, as: User)
+ end
+
+ def select_notifications(user : User) : Array(String)
+ request = <<-SQL
+ SELECT notifications
+ FROM users
+ WHERE email = $1
+ SQL
+
+ return PG_DB.query_one(request, user.email, as: Array(String))
+ end
+end
diff --git a/src/invidious/database/videos.cr b/src/invidious/database/videos.cr
new file mode 100644
index 00000000..695f5b33
--- /dev/null
+++ b/src/invidious/database/videos.cr
@@ -0,0 +1,52 @@
+require "./base.cr"
+
+module Invidious::Database::Videos
+ extend self
+
+ def insert(video : Video)
+ request = <<-SQL
+ INSERT INTO videos
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ SQL
+
+ PG_DB.exec(request, video.id, video.info.to_json, video.updated)
+ end
+
+ def delete(id)
+ request = <<-SQL
+ DELETE FROM videos *
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, id)
+ end
+
+ def delete_expired
+ request = <<-SQL
+ DELETE FROM videos *
+ WHERE updated < (now() - interval '6 hours')
+ SQL
+
+ PG_DB.exec(request)
+ end
+
+ def update(video : Video)
+ request = <<-SQL
+ UPDATE videos
+ SET (id, info, updated) = ($1, $2, $3)
+ WHERE id = $1
+ SQL
+
+ PG_DB.exec(request, video.id, video.info.to_json, video.updated)
+ end
+
+ def select(id : String) : Video?
+ request = <<-SQL
+ SELECT * FROM videos
+ WHERE id = $1
+ SQL
+
+ return PG_DB.query_one?(request, id, as: Video)
+ end
+end
diff --git a/src/invidious/exceptions.cr b/src/invidious/exceptions.cr
new file mode 100644
index 00000000..690db907
--- /dev/null
+++ b/src/invidious/exceptions.cr
@@ -0,0 +1,40 @@
+# InfoExceptions are for displaying information to the user.
+#
+# An InfoException might or might not indicate that something went wrong.
+# Historically Invidious didn't differentiate between these two options, so to
+# maintain previous functionality InfoExceptions do not print backtraces.
+class InfoException < Exception
+end
+
+# Exception used to hold the bogus UCID during a channel search.
+class ChannelSearchException < InfoException
+ getter channel : String
+
+ def initialize(@channel)
+ end
+end
+
+# Exception used to hold the name of the missing item
+# Should be used in all parsing functions
+class BrokenTubeException < Exception
+ getter element : String
+
+ def initialize(@element)
+ end
+
+ def message
+ return "Missing JSON element \"#{@element}\""
+ end
+end
+
+# Exception thrown when an element is not found.
+class NotFoundException < InfoException
+end
+
+class VideoNotAvailableException < Exception
+end
+
+# Exception used to indicate that the JSON response from YT is missing
+# some important information, and that the query should be sent again.
+class RetryOnceException < Exception
+end
diff --git a/src/invidious/frontend/channel_page.cr b/src/invidious/frontend/channel_page.cr
new file mode 100644
index 00000000..fe7d6d6e
--- /dev/null
+++ b/src/invidious/frontend/channel_page.cr
@@ -0,0 +1,46 @@
+module Invidious::Frontend::ChannelPage
+ extend self
+
+ enum TabsAvailable
+ Videos
+ Shorts
+ Streams
+ Podcasts
+ Releases
+ Playlists
+ Community
+ Channels
+ end
+
+ def generate_tabs_links(locale : String, channel : AboutChannel, selected_tab : TabsAvailable)
+ return String.build(1500) do |str|
+ base_url = "/channel/#{channel.ucid}"
+
+ TabsAvailable.each do |tab|
+ # Ignore the playlists tab, as it is not yet supported for auto-generated channels
+ next if (tab.playlists? && channel.auto_generated)
+
+ tab_name = tab.to_s.downcase
+
+ if channel.tabs.includes? tab_name
+ str << %(<div class="pure-u-1 pure-md-1-3">\n)
+
+ if tab == selected_tab
+ str << "\t<b>"
+ str << translate(locale, "channel_tab_#{tab_name}_label")
+ str << "</b>\n"
+ else
+ # The Videos tab doesn't have a trailing path component
+ url = tab.videos? ? base_url : "#{base_url}/#{tab_name}"
+
+ str << %(\t<a href=") << url << %(">)
+ str << translate(locale, "channel_tab_#{tab_name}_label")
+ str << "</a>\n"
+ end
+
+ str << "</div>"
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/frontend/comments_reddit.cr b/src/invidious/frontend/comments_reddit.cr
new file mode 100644
index 00000000..4dda683e
--- /dev/null
+++ b/src/invidious/frontend/comments_reddit.cr
@@ -0,0 +1,50 @@
+module Invidious::Frontend::Comments
+ extend self
+
+ def template_reddit(root, locale)
+ String.build do |html|
+ root.each do |child|
+ if child.data.is_a?(RedditComment)
+ child = child.data.as(RedditComment)
+ body_html = HTML.unescape(child.body_html)
+
+ replies_html = ""
+ if child.replies.is_a?(RedditThing)
+ replies = child.replies.as(RedditThing)
+ replies_html = self.template_reddit(replies.data.as(RedditListing).children, locale)
+ end
+
+ if child.depth > 0
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1-24">
+ </div>
+ <div class="pure-u-23-24">
+ END_HTML
+ else
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1">
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ <p>
+ <a href="javascript:void(0)" data-onclick="toggle_parent">[ − ]</a>
+ <b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
+ #{translate_count(locale, "comments_points_count", child.score, NumberFormatting::Separator)}
+ <span title="#{child.created_utc.to_s("%a %B %-d %T %Y UTC")}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
+ <a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
+ </p>
+ <div>
+ #{body_html}
+ #{replies_html}
+ </div>
+ </div>
+ </div>
+ END_HTML
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/frontend/comments_youtube.cr b/src/invidious/frontend/comments_youtube.cr
new file mode 100644
index 00000000..a0e1d783
--- /dev/null
+++ b/src/invidious/frontend/comments_youtube.cr
@@ -0,0 +1,208 @@
+module Invidious::Frontend::Comments
+ extend self
+
+ def template_youtube(comments, locale, thin_mode, is_replies = false)
+ String.build do |html|
+ root = comments["comments"].as_a
+ root.each do |child|
+ if child["replies"]?
+ replies_count_text = translate_count(locale,
+ "comments_view_x_replies",
+ child["replies"]["replyCount"].as_i64 || 0,
+ NumberFormatting::Separator
+ )
+
+ replies_html = <<-END_HTML
+ <div id="replies" class="pure-g">
+ <div class="pure-u-1-24"></div>
+ <div class="pure-u-23-24">
+ <p>
+ <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
+ data-onclick="get_youtube_replies" data-load-replies>#{replies_count_text}</a>
+ </p>
+ </div>
+ </div>
+ END_HTML
+ elsif comments["authorId"]? && !comments["singlePost"]?
+ # for posts we should display a link to the post
+ replies_count_text = translate_count(locale,
+ "comments_view_x_replies",
+ child["replyCount"].as_i64 || 0,
+ NumberFormatting::Separator
+ )
+
+ replies_html = <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1-24"></div>
+ <div class="pure-u-23-24">
+ <p>
+ <a href="/post/#{child["commentId"]}?ucid=#{comments["authorId"]}">#{replies_count_text}</a>
+ </p>
+ </div>
+ </div>
+ END_HTML
+ end
+
+ if !thin_mode
+ author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).request_target}"
+ else
+ author_thumbnail = ""
+ end
+
+ author_name = HTML.escape(child["author"].as_s)
+ sponsor_icon = ""
+ if child["verified"]?.try &.as_bool && child["authorIsChannelOwner"]?.try &.as_bool
+ author_name += "&nbsp;<i class=\"icon ion ion-md-checkmark-circle\"></i>"
+ elsif child["verified"]?.try &.as_bool
+ author_name += "&nbsp;<i class=\"icon ion ion-md-checkmark\"></i>"
+ end
+
+ if child["isSponsor"]?.try &.as_bool
+ sponsor_icon = String.build do |str|
+ str << %(<img alt="" )
+ str << %(src="/ggpht) << URI.parse(child["sponsorIconUrl"].as_s).request_target << "\" "
+ str << %(title=") << translate(locale, "Channel Sponsor") << "\" "
+ str << %(width="16" height="16" />)
+ end
+ end
+ html << <<-END_HTML
+ <div class="pure-g" style="width:100%">
+ <div class="channel-profile pure-u-4-24 pure-u-md-2-24">
+ <img loading="lazy" style="margin-right:1em;margin-top:1em;width:90%" src="#{author_thumbnail}" alt="" />
+ </div>
+ <div class="pure-u-20-24 pure-u-md-22-24">
+ <p>
+ <b>
+ <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{author_name}</a>
+ </b>
+ #{sponsor_icon}
+ <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
+ END_HTML
+
+ if child["attachment"]?
+ attachment = child["attachment"]
+
+ case attachment["type"]
+ when "image"
+ attachment = attachment["imageThumbnails"][1]
+
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1 pure-u-md-1-2">
+ <img loading="lazy" style="width:100%" src="/ggpht#{URI.parse(attachment["url"].as_s).request_target}" alt="" />
+ </div>
+ </div>
+ END_HTML
+ when "video"
+ if attachment["error"]?
+ html << <<-END_HTML
+ <div class="pure-g video-iframe-wrapper">
+ <p>#{attachment["error"]}</p>
+ </div>
+ END_HTML
+ else
+ html << <<-END_HTML
+ <div class="pure-g video-iframe-wrapper">
+ <iframe class="video-iframe" src='/embed/#{attachment["videoId"]?}?autoplay=0'></iframe>
+ </div>
+ END_HTML
+ end
+ when "multiImage"
+ html << <<-END_HTML
+ <section class="carousel">
+ <a class="skip-link" href="#skip-#{child["commentId"]}">#{translate(locale, "carousel_skip")}</a>
+ <div class="slides">
+ END_HTML
+ image_array = attachment["images"].as_a
+
+ image_array.each_index do |i|
+ html << <<-END_HTML
+ <div class="slides-item slide-#{i + 1}" id="#{child["commentId"]}-slide-#{i + 1}" aria-label="#{translate(locale, "carousel_slide", {"current" => (i + 1).to_s, "total" => image_array.size.to_s})}" tabindex="0">
+ <img loading="lazy" src="/ggpht#{URI.parse(image_array[i][1]["url"].as_s).request_target}" alt="" />
+ </div>
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ </div>
+ <div class="carousel__nav">
+ END_HTML
+ attachment["images"].as_a.each_index do |i|
+ html << <<-END_HTML
+ <a class="slider-nav" href="##{child["commentId"]}-slide-#{i + 1}" aria-label="#{translate(locale, "carousel_go_to", (i + 1).to_s)}" tabindex="-1" aria-hidden="true">#{i + 1}</a>
+ END_HTML
+ end
+ html << <<-END_HTML
+ </div>
+ <div id="skip-#{child["commentId"]}"></div>
+ </section>
+ END_HTML
+ else nil # Ignore
+ end
+ end
+
+ html << <<-END_HTML
+ <p>
+ <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
+ |
+ END_HTML
+
+ if comments["videoId"]?
+ html << <<-END_HTML
+ <a rel="noreferrer noopener" href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
+ |
+ END_HTML
+ elsif comments["authorId"]?
+ html << <<-END_HTML
+ <a rel="noreferrer noopener" href="https://www.youtube.com/channel/#{comments["authorId"]}/community?lb=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
+ |
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
+ END_HTML
+
+ if child["creatorHeart"]?
+ if !thin_mode
+ creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).request_target}"
+ else
+ creator_thumbnail = ""
+ end
+
+ html << <<-END_HTML
+ &nbsp;
+ <span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a ❤", child["creatorHeart"]["creatorName"].as_s)}">
+ <span class="creator-heart">
+ <img loading="lazy" class="creator-heart-background-hearted" src="#{creator_thumbnail}" alt="" />
+ <span class="creator-heart-small-hearted">
+ <span class="icon ion-ios-heart creator-heart-small-container"></span>
+ </span>
+ </span>
+ </span>
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ </p>
+ #{replies_html}
+ </div>
+ </div>
+ END_HTML
+ end
+
+ if comments["continuation"]?
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1">
+ <p>
+ <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
+ data-onclick="get_youtube_replies" data-load-more #{"data-load-replies" if is_replies}>#{translate(locale, "Load more")}</a>
+ </p>
+ </div>
+ </div>
+ END_HTML
+ end
+ end
+ end
+end
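For orientation, here is a minimal sketch of how `template_youtube` might be invoked; `comments_json` and the locale code are placeholders for data produced elsewhere in the application and are not part of this patch.

    # Illustrative only: `comments_json` stands in for the parsed comment data
    # (a JSON::Any exposing "comments" and, optionally, "continuation",
    # "videoId" / "authorId", etc., as read by the template above).
    html = Invidious::Frontend::Comments.template_youtube(
      comments_json,    # parsed comment data
      "en-US",          # locale code passed to translate()/translate_count()
      false,            # thin_mode: true skips avatar and creator-heart images
      is_replies: false # true when rendering a replies continuation
    )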
diff --git a/src/invidious/frontend/misc.cr b/src/invidious/frontend/misc.cr
new file mode 100644
index 00000000..7a6cf79d
--- /dev/null
+++ b/src/invidious/frontend/misc.cr
@@ -0,0 +1,14 @@
+module Invidious::Frontend::Misc
+ extend self
+
+ def redirect_url(env : HTTP::Server::Context)
+ prefs = env.get("preferences").as(Preferences)
+
+ if prefs.automatic_instance_redirect
+ current_page = env.get?("current_page").as(String)
+ return "/redirect?referer=#{current_page}"
+ else
+ return "https://redirect.invidious.io#{env.request.resource}"
+ end
+ end
+end
diff --git a/src/invidious/frontend/pagination.cr b/src/invidious/frontend/pagination.cr
new file mode 100644
index 00000000..3f931f4e
--- /dev/null
+++ b/src/invidious/frontend/pagination.cr
@@ -0,0 +1,97 @@
+require "uri"
+
+module Invidious::Frontend::Pagination
+ extend self
+
+ private def previous_page(str : String::Builder, locale : String?, url : String)
+ # Link
+ str << %(<a href=") << url << %(" class="pure-button pure-button-secondary">)
+
+ if locale_is_rtl?(locale)
+ # Inverted arrow ("previous" points to the right)
+ str << translate(locale, "Previous page")
+ str << "&nbsp;&nbsp;"
+ str << %(<i class="icon ion-ios-arrow-forward"></i>)
+ else
+ # Regular arrow ("previous" points to the left)
+ str << %(<i class="icon ion-ios-arrow-back"></i>)
+ str << "&nbsp;&nbsp;"
+ str << translate(locale, "Previous page")
+ end
+
+ str << "</a>"
+ end
+
+ private def next_page(str : String::Builder, locale : String?, url : String)
+ # Link
+ str << %(<a href=") << url << %(" class="pure-button pure-button-secondary">)
+
+ if locale_is_rtl?(locale)
+ # Inverted arrow ("next" points to the left)
+ str << %(<i class="icon ion-ios-arrow-back"></i>)
+ str << "&nbsp;&nbsp;"
+ str << translate(locale, "Next page")
+ else
+ # Regular arrow ("next" points to the right)
+ str << translate(locale, "Next page")
+ str << "&nbsp;&nbsp;"
+ str << %(<i class="icon ion-ios-arrow-forward"></i>)
+ end
+
+ str << "</a>"
+ end
+
+ def nav_numeric(locale : String?, *, base_url : String | URI, current_page : Int, show_next : Bool = true)
+ return String.build do |str|
+ str << %(<div class="h-box">\n)
+ str << %(<div class="page-nav-container flexible">\n)
+
+ str << %(<div class="page-prev-container flex-left">)
+
+ if current_page > 1
+ params_prev = URI::Params{"page" => (current_page - 1).to_s}
+ url_prev = HttpServer::Utils.add_params_to_url(base_url, params_prev)
+
+ self.previous_page(str, locale, url_prev.to_s)
+ end
+
+ str << %(</div>\n)
+ str << %(<div class="page-next-container flex-right">)
+
+ if show_next
+ params_next = URI::Params{"page" => (current_page + 1).to_s}
+ url_next = HttpServer::Utils.add_params_to_url(base_url, params_next)
+
+ self.next_page(str, locale, url_next.to_s)
+ end
+
+ str << %(</div>\n)
+
+ str << %(</div>\n)
+ str << %(</div>\n\n)
+ end
+ end
+
+ def nav_ctoken(locale : String?, *, base_url : String | URI, ctoken : String?)
+ return String.build do |str|
+ str << %(<div class="h-box">\n)
+ str << %(<div class="page-nav-container flexible">\n)
+
+ str << %(<div class="page-prev-container flex-left"></div>\n)
+
+ str << %(<div class="page-next-container flex-right">)
+
+ if !ctoken.nil?
+ params_next = URI::Params{"continuation" => ctoken}
+ url_next = HttpServer::Utils.add_params_to_url(base_url, params_next)
+
+ self.next_page(str, locale, url_next.to_s)
+ end
+
+ str << %(</div>\n)
+
+ str << %(</div>\n)
+ str << %(</div>\n\n)
+ end
+ end
+end
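As a usage reference, a hedged sketch of how these two helpers might be called from a page handler; the base URLs, page number and `next_continuation` variable are illustrative assumptions.

    # Illustrative only: numeric pagination for page 3 of a listing.
    # show_next: false hides the "Next page" link on the last page.
    html = Invidious::Frontend::Pagination.nav_numeric(
      "en-US",
      base_url: "/channel/UCxxxxxxxxxxxxxxxxxxxxxx/videos",
      current_page: 3,
      show_next: true
    )

    # Continuation-token variant: only a "Next page" link is rendered,
    # and only when a token is available (nil means "no further pages").
    html = Invidious::Frontend::Pagination.nav_ctoken(
      "en-US",
      base_url: "/channel/UCxxxxxxxxxxxxxxxxxxxxxx/playlists",
      ctoken: next_continuation
    )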
diff --git a/src/invidious/frontend/search_filters.cr b/src/invidious/frontend/search_filters.cr
new file mode 100644
index 00000000..8ac0af2e
--- /dev/null
+++ b/src/invidious/frontend/search_filters.cr
@@ -0,0 +1,135 @@
+module Invidious::Frontend::SearchFilters
+ extend self
+
+  # Generate the search filters collapsible widget.
+ def generate(filters : Search::Filters, query : String, page : Int, locale : String) : String
+ return String.build(8000) do |str|
+ str << "<div id='filters'>\n"
+ str << "\t<details id='filters-collapse'>"
+ str << "\t\t<summary>" << translate(locale, "search_filters_title") << "</summary>\n"
+
+ str << "\t\t<div id='filters-box'><form action='/search' method='get'>\n"
+
+ str << "\t\t\t<input type='hidden' name='q' value='" << HTML.escape(query) << "'>\n"
+ str << "\t\t\t<input type='hidden' name='page' value='" << page << "'>\n"
+
+ str << "\t\t\t<div id='filters-flex'>"
+
+ filter_wrapper(date)
+ filter_wrapper(type)
+ filter_wrapper(duration)
+ filter_wrapper(features)
+ filter_wrapper(sort)
+
+ str << "\t\t\t</div>\n"
+
+ str << "\t\t\t<div id='filters-apply'>"
+ str << "<button type='submit' class=\"pure-button pure-button-primary\">"
+ str << translate(locale, "search_filters_apply_button")
+ str << "</button></div>\n"
+
+ str << "\t\t</form></div>\n"
+
+ str << "\t</details>\n"
+ str << "</div>\n"
+ end
+ end
+
+ # Generate wrapper HTML (`<div>`, filter name, etc...) around the
+ # `<input>` elements of a search filter
+ macro filter_wrapper(name)
+ str << "\t\t\t\t<div class=\"filter-column\"><fieldset>\n"
+
+ str << "\t\t\t\t\t<legend><div class=\"filter-name underlined\">"
+ str << translate(locale, "search_filters_{{name}}_label")
+ str << "</div></legend>\n"
+
+ str << "\t\t\t\t\t<div class=\"filter-options\">\n"
+ make_{{name}}_filter_options(str, filters.{{name}}, locale)
+ str << "\t\t\t\t\t</div>"
+
+ str << "\t\t\t\t</fieldset></div>\n"
+ end
+
+ # Generates the HTML for the list of radio buttons of the "date" search filter
+ def make_date_filter_options(str : String::Builder, value : Search::Filters::Date, locale : String)
+ {% for value in Invidious::Search::Filters::Date.constants %}
+ {% date = value.underscore %}
+
+ str << "\t\t\t\t\t\t<div>"
+ str << "<input type='radio' name='date' id='filter-date-{{date}}' value='{{date}}'"
+ str << " checked" if value.{{date}}?
+ str << '>'
+
+ str << "<label for='filter-date-{{date}}'>"
+ str << translate(locale, "search_filters_date_option_{{date}}")
+ str << "</label></div>\n"
+ {% end %}
+ end
+
+ # Generates the HTML for the list of radio buttons of the "type" search filter
+ def make_type_filter_options(str : String::Builder, value : Search::Filters::Type, locale : String)
+ {% for value in Invidious::Search::Filters::Type.constants %}
+ {% type = value.underscore %}
+
+ str << "\t\t\t\t\t\t<div>"
+ str << "<input type='radio' name='type' id='filter-type-{{type}}' value='{{type}}'"
+ str << " checked" if value.{{type}}?
+ str << '>'
+
+ str << "<label for='filter-type-{{type}}'>"
+ str << translate(locale, "search_filters_type_option_{{type}}")
+ str << "</label></div>\n"
+ {% end %}
+ end
+
+ # Generates the HTML for the list of radio buttons of the "duration" search filter
+ def make_duration_filter_options(str : String::Builder, value : Search::Filters::Duration, locale : String)
+ {% for value in Invidious::Search::Filters::Duration.constants %}
+ {% duration = value.underscore %}
+
+ str << "\t\t\t\t\t\t<div>"
+ str << "<input type='radio' name='duration' id='filter-duration-{{duration}}' value='{{duration}}'"
+ str << " checked" if value.{{duration}}?
+ str << '>'
+
+ str << "<label for='filter-duration-{{duration}}'>"
+ str << translate(locale, "search_filters_duration_option_{{duration}}")
+ str << "</label></div>\n"
+ {% end %}
+ end
+
+ # Generates the HTML for the list of checkboxes of the "features" search filter
+ def make_features_filter_options(str : String::Builder, value : Search::Filters::Features, locale : String)
+ {% for value in Invidious::Search::Filters::Features.constants %}
+ {% if value.stringify != "All" && value.stringify != "None" %}
+ {% feature = value.underscore %}
+
+ str << "\t\t\t\t\t\t<div>"
+ str << "<input type='checkbox' name='features' id='filter-feature-{{feature}}' value='{{feature}}'"
+ str << " checked" if value.{{feature}}?
+ str << '>'
+
+ str << "<label for='filter-feature-{{feature}}'>"
+ str << translate(locale, "search_filters_features_option_{{feature}}")
+ str << "</label></div>\n"
+ {% end %}
+ {% end %}
+ end
+
+ # Generates the HTML for the list of radio buttons of the "sort" search filter
+ def make_sort_filter_options(str : String::Builder, value : Search::Filters::Sort, locale : String)
+ {% for value in Invidious::Search::Filters::Sort.constants %}
+ {% sort = value.underscore %}
+
+ str << "\t\t\t\t\t\t<div>"
+ str << "<input type='radio' name='sort' id='filter-sort-{{sort}}' value='{{sort}}'"
+ str << " checked" if value.{{sort}}?
+ str << '>'
+
+ str << "<label for='filter-sort-{{sort}}'>"
+ str << translate(locale, "search_filters_sort_option_{{sort}}")
+ str << "</label></div>\n"
+ {% end %}
+ end
+end
diff --git a/src/invidious/frontend/watch_page.cr b/src/invidious/frontend/watch_page.cr
new file mode 100644
index 00000000..c8cb7110
--- /dev/null
+++ b/src/invidious/frontend/watch_page.cr
@@ -0,0 +1,107 @@
+module Invidious::Frontend::WatchPage
+ extend self
+
+ # A handy structure to pass many elements at
+ # once to the download widget function
+ struct VideoAssets
+ getter full_videos : Array(Hash(String, JSON::Any))
+ getter video_streams : Array(Hash(String, JSON::Any))
+ getter audio_streams : Array(Hash(String, JSON::Any))
+ getter captions : Array(Invidious::Videos::Captions::Metadata)
+
+ def initialize(
+ @full_videos,
+ @video_streams,
+ @audio_streams,
+ @captions
+ )
+ end
+ end
+
+ def download_widget(locale : String, video : Video, video_assets : VideoAssets) : String
+ if CONFIG.disabled?("downloads")
+ return "<p id=\"download\">#{translate(locale, "Download is disabled")}</p>"
+ end
+
+ return String.build(4000) do |str|
+ str << "<form"
+ str << " class=\"pure-form pure-form-stacked\""
+ str << " action='/download'"
+ str << " method='post'"
+ str << " rel='noopener'"
+ str << " target='_blank'>"
+ str << '\n'
+
+ # Hidden inputs for video id and title
+ str << "<input type='hidden' name='id' value='" << video.id << "'/>\n"
+ str << "<input type='hidden' name='title' value='" << HTML.escape(video.title) << "'/>\n"
+
+ str << "\t<div class=\"pure-control-group\">\n"
+
+ str << "\t\t<label for='download_widget'>"
+ str << translate(locale, "Download as: ")
+ str << "</label>\n"
+
+ str << "\t\t<select name='download_widget' id='download_widget'>\n"
+
+ # Non-DASH videos (audio+video)
+
+ video_assets.full_videos.each do |option|
+ mimetype = option["mimeType"].as_s.split(";")[0]
+
+ height = Invidious::Videos::Formats.itag_to_metadata?(option["itag"]).try &.["height"]?
+
+ value = {"itag": option["itag"], "ext": mimetype.split("/")[1]}.to_json
+
+ str << "\t\t\t<option value='" << value << "'>"
+ str << (height || "~240") << "p - " << mimetype
+ str << "</option>\n"
+ end
+
+ # DASH video streams
+
+ video_assets.video_streams.each do |option|
+ mimetype = option["mimeType"].as_s.split(";")[0]
+
+ value = {"itag": option["itag"], "ext": mimetype.split("/")[1]}.to_json
+
+ str << "\t\t\t<option value='" << value << "'>"
+ str << option["qualityLabel"] << " - " << mimetype << " @ " << option["fps"] << "fps - video only"
+ str << "</option>\n"
+ end
+
+ # DASH audio streams
+
+ video_assets.audio_streams.each do |option|
+ mimetype = option["mimeType"].as_s.split(";")[0]
+
+ value = {"itag": option["itag"], "ext": mimetype.split("/")[1]}.to_json
+
+ str << "\t\t\t<option value='" << value << "'>"
+ str << mimetype << " @ " << (option["bitrate"]?.try &.as_i./ 1000) << "k - audio only"
+ str << "</option>\n"
+ end
+
+ # Subtitles (a.k.a "closed captions")
+
+ video_assets.captions.each do |caption|
+ value = {"label": caption.name, "ext": "#{caption.language_code}.vtt"}.to_json
+
+ str << "\t\t\t<option value='" << value << "'>"
+ str << translate(locale, "download_subtitles", translate(locale, caption.name))
+ str << "</option>\n"
+ end
+
+ # End of form
+
+ str << "\t\t</select>\n"
+ str << "\t</div>\n"
+
+ str << "\t<button type=\"submit\" class=\"pure-button pure-button-primary\">\n"
+ str << "\t\t<b>" << translate(locale, "Download") << "</b>\n"
+ str << "\t</button>\n"
+
+ str << "</form>\n"
+ end
+ end
+end
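For context, a minimal sketch of how the download widget might be wired up; the stream arrays and the `video` object are placeholders for values produced by the rest of the watch-page code, not part of this patch.

    # Illustrative only: the arrays would normally be filled from the Video
    # object (muxed formats, DASH video/audio streams, caption metadata).
    assets = Invidious::Frontend::WatchPage::VideoAssets.new(
      full_videos: full_videos,     # audio+video (muxed) formats
      video_streams: video_streams, # DASH video-only streams
      audio_streams: audio_streams, # DASH audio-only streams
      captions: captions            # Invidious::Videos::Captions::Metadata entries
    )

    # Returns the <form> markup, or a "Download is disabled" paragraph when
    # the "downloads" feature is disabled in the configuration.
    html = Invidious::Frontend::WatchPage.download_widget("en-US", video, assets)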
diff --git a/src/invidious/hashtag.cr b/src/invidious/hashtag.cr
new file mode 100644
index 00000000..d9d584c9
--- /dev/null
+++ b/src/invidious/hashtag.cr
@@ -0,0 +1,42 @@
+module Invidious::Hashtag
+ extend self
+
+ def fetch(hashtag : String, page : Int, region : String? = nil) : Array(SearchItem)
+ cursor = (page - 1) * 60
+ ctoken = generate_continuation(hashtag, cursor)
+
+ client_config = YoutubeAPI::ClientConfig.new(region: region)
+ response = YoutubeAPI.browse(continuation: ctoken, client_config: client_config)
+
+ items, _ = extract_items(response)
+ return items
+ end
+
+ def generate_continuation(hashtag : String, cursor : Int)
+ object = {
+ "80226972:embedded" => {
+ "2:string" => "FEhashtag",
+ "3:base64" => {
+ "1:varint" => 60_i64, # result count
+ "15:base64" => {
+ "1:varint" => cursor.to_i64,
+ "2:varint" => 0_i64,
+ },
+ "93:2:embedded" => {
+ "1:string" => hashtag,
+ "2:varint" => 0_i64,
+ "3:varint" => 1_i64,
+ },
+ },
+ "35:string" => "browse-feedFEhashtag",
+ },
+ }
+
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return continuation
+ end
+end
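As a usage note, a hedged sketch of calling this module; the hashtag value is made up, and page numbers are 1-based (each page maps to a cursor of `(page - 1) * 60` inside the continuation token).

    # Illustrative only: fetch the second page of results for a hashtag.
    items = Invidious::Hashtag.fetch("linux", page: 2)

    # The continuation token can also be generated on its own, e.g. to pass
    # it to YoutubeAPI.browse directly.
    ctoken = Invidious::Hashtag.generate_continuation("linux", 60)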
diff --git a/src/invidious/helpers/crystal_class_overrides.cr b/src/invidious/helpers/crystal_class_overrides.cr
new file mode 100644
index 00000000..3040d7a0
--- /dev/null
+++ b/src/invidious/helpers/crystal_class_overrides.cr
@@ -0,0 +1,104 @@
+# Override of the TCPSocket and HTTP::Client classes in order to allow an
+# IP family to be selected for domains that resolve to both IPv4 and
+# IPv6 addresses.
+#
+class TCPSocket
+ def initialize(host, port, dns_timeout = nil, connect_timeout = nil, blocking = false, family = Socket::Family::UNSPEC)
+ Addrinfo.tcp(host, port, timeout: dns_timeout, family: family) do |addrinfo|
+ super(addrinfo.family, addrinfo.type, addrinfo.protocol, blocking)
+ connect(addrinfo, timeout: connect_timeout) do |error|
+ close
+ error
+ end
+ end
+ end
+end
+
+# :ditto:
+class HTTP::Client
+ property family : Socket::Family = Socket::Family::UNSPEC
+
+ # Override stdlib to automatically initialize proxy if configured
+ #
+  # Accurate as of Crystal 1.12.1
+
+ def initialize(@host : String, port = nil, tls : TLSContext = nil)
+ check_host_only(@host)
+
+ {% if flag?(:without_openssl) %}
+ if tls
+ raise "HTTP::Client TLS is disabled because `-D without_openssl` was passed at compile time"
+ end
+ @tls = nil
+ {% else %}
+ @tls = case tls
+ when true
+ OpenSSL::SSL::Context::Client.new
+ when OpenSSL::SSL::Context::Client
+ tls
+ when false, nil
+ nil
+ end
+ {% end %}
+
+ @port = (port || (@tls ? 443 : 80)).to_i
+
+ self.proxy = make_configured_http_proxy_client() if CONFIG.http_proxy
+ end
+
+ def initialize(@io : IO, @host = "", @port = 80)
+ @reconnect = false
+
+ self.proxy = make_configured_http_proxy_client() if CONFIG.http_proxy
+ end
+
+ private def io
+ io = @io
+ return io if io
+ unless @reconnect
+ raise "This HTTP::Client cannot be reconnected"
+ end
+
+ hostname = @host.starts_with?('[') && @host.ends_with?(']') ? @host[1..-2] : @host
+ io = TCPSocket.new hostname, @port, @dns_timeout, @connect_timeout, family: @family
+ io.read_timeout = @read_timeout if @read_timeout
+ io.write_timeout = @write_timeout if @write_timeout
+ io.sync = false
+
+ {% if !flag?(:without_openssl) %}
+ if tls = @tls
+ tcp_socket = io
+ begin
+ io = OpenSSL::SSL::Socket::Client.new(tcp_socket, context: tls, sync_close: true, hostname: @host.rchop('.'))
+ rescue exc
+ # don't leak the TCP socket when the SSL connection failed
+ tcp_socket.close
+ raise exc
+ end
+ end
+ {% end %}
+
+ @io = io
+ end
+end
+
+# Mute the ClientError exception raised when a connection is flushed.
+# This happens when the connection is unexpectedly closed by the client.
+#
+class HTTP::Server::Response
+ class Output
+ private def unbuffered_flush
+ @io.flush
+ rescue ex : IO::Error
+ unbuffered_close
+ end
+ end
+end
+
+# TODO: Document this override
+#
+class PG::ResultSet
+ def field(index = @column_index)
+ @fields.not_nil![index]
+ end
+end
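For reference, a minimal sketch of what the added `family` property enables; the host name is an example, and proxy handling via `CONFIG.http_proxy` happens transparently in the overridden initializers above.

    # Illustrative only: force an IPv6 (or IPv4) connection to a host that
    # publishes both record types. `family` defaults to Socket::Family::UNSPEC.
    client = HTTP::Client.new("www.youtube.com", tls: true)
    client.family = Socket::Family::INET6 # Socket::Family::INET for IPv4-only
    response = client.get("/")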
diff --git a/src/invidious/helpers/errors.cr b/src/invidious/helpers/errors.cr
index 68ced430..b7643194 100644
--- a/src/invidious/helpers/errors.cr
+++ b/src/invidious/helpers/errors.cr
@@ -1,13 +1,9 @@
-# InfoExceptions are for displaying information to the user.
-#
-# An InfoException might or might not indicate that something went wrong.
-# Historically Invidious didn't differentiate between these two options, so to
-# maintain previous functionality InfoExceptions do not print backtraces.
-class InfoException < Exception
-end
+# -------------------
+# Issue template
+# -------------------
macro error_template(*args)
- error_template_helper(env, locale, {{*args}})
+ error_template_helper(env, {{args.splat}})
end
def github_details(summary : String, content : String)
@@ -22,84 +18,185 @@ def github_details(summary : String, content : String)
return HTML.escape(details)
end
-def error_template_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, exception : Exception)
+def error_template_helper(env : HTTP::Server::Context, status_code : Int32, exception : Exception)
if exception.is_a?(InfoException)
- return error_template_helper(env, locale, status_code, exception.message || "")
+ return error_template_helper(env, status_code, exception.message || "")
end
+
+ locale = env.get("preferences").as(Preferences).locale
+
env.response.content_type = "text/html"
env.response.status_code = status_code
- issue_template = %(Title: `#{exception.message} (#{exception.class})`)
- issue_template += %(\nDate: `#{Time::Format::ISO_8601_DATE_TIME.format(Time.utc)}`)
- issue_template += %(\nRoute: `#{env.request.resource}`)
- issue_template += %(\nVersion: `#{SOFTWARE["version"]} @ #{SOFTWARE["branch"]}`)
- # issue_template += github_details("Preferences", env.get("preferences").as(Preferences).to_pretty_json)
+
+ issue_title = "#{exception.message} (#{exception.class})"
+
+ issue_template = <<-TEXT
+ Title: `#{HTML.escape(issue_title)}`
+ Date: `#{Time::Format::ISO_8601_DATE_TIME.format(Time.utc)}`
+ Route: `#{HTML.escape(env.request.resource)}`
+ Version: `#{SOFTWARE["version"]} @ #{SOFTWARE["branch"]}`
+
+ TEXT
+
issue_template += github_details("Backtrace", exception.inspect_with_backtrace)
+
+ # URLs for the error message below
+ url_faq = "https://github.com/iv-org/documentation/blob/master/docs/faq.md"
+ url_search_issues = "https://github.com/iv-org/invidious/issues"
+ url_search_issues += "?q=is:issue+is:open+"
+ url_search_issues += URI.encode_www_form("[Bug] #{issue_title}")
+
+ url_switch = "https://redirect.invidious.io" + env.request.resource
+
+ url_new_issue = "https://github.com/iv-org/invidious/issues/new"
+ url_new_issue += "?labels=bug&template=bug_report.md&title="
+ url_new_issue += URI.encode_www_form("[Bug] " + issue_title)
+
error_message = <<-END_HTML
- Looks like you've found a bug in Invidious. Please open a new issue
- <a href="https://github.com/iv-org/invidious/issues">on GitHub</a>
- and include the following text in your message:
- <pre style="padding: 20px; background: rgba(0, 0, 0, 0.12345);">#{issue_template}</pre>
+ <div class="error_message">
+ <h2>#{translate(locale, "crash_page_you_found_a_bug")}</h2>
+ <br/><br/>
+
+ <p><b>#{translate(locale, "crash_page_before_reporting")}</b></p>
+ <ul>
+ <li>#{translate(locale, "crash_page_refresh", env.request.resource)}</li>
+ <li>#{translate(locale, "crash_page_switch_instance", url_switch)}</li>
+ <li>#{translate(locale, "crash_page_read_the_faq", url_faq)}</li>
+ <li>#{translate(locale, "crash_page_search_issue", url_search_issues)}</li>
+ </ul>
+
+ <br/>
+ <p>#{translate(locale, "crash_page_report_issue", url_new_issue)}</p>
+
+ <!-- TODO: Add a "copy to clipboard" button -->
+ <pre style="padding: 20px; background: rgba(0, 0, 0, 0.12345);">#{issue_template}</pre>
+ </div>
END_HTML
+
+ # Don't show the usual "next steps" widget. The same options are
+ # proposed above the error message, just worded differently.
+ next_steps = ""
+
return templated "error"
end
-def error_template_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, message : String)
+def error_template_helper(env : HTTP::Server::Context, status_code : Int32, message : String)
env.response.content_type = "text/html"
env.response.status_code = status_code
+
+ locale = env.get("preferences").as(Preferences).locale
+
error_message = translate(locale, message)
+ next_steps = error_redirect_helper(env)
+
return templated "error"
end
+# -------------------
+# Atom feeds
+# -------------------
+
macro error_atom(*args)
- error_atom_helper(env, locale, {{*args}})
+ error_atom_helper(env, {{args.splat}})
end
-def error_atom_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, exception : Exception)
+def error_atom_helper(env : HTTP::Server::Context, status_code : Int32, exception : Exception)
if exception.is_a?(InfoException)
- return error_atom_helper(env, locale, status_code, exception.message || "")
+ return error_atom_helper(env, status_code, exception.message || "")
end
+
env.response.content_type = "application/atom+xml"
env.response.status_code = status_code
+
return "<error>#{exception.inspect_with_backtrace}</error>"
end
-def error_atom_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, message : String)
+def error_atom_helper(env : HTTP::Server::Context, status_code : Int32, message : String)
env.response.content_type = "application/atom+xml"
env.response.status_code = status_code
+
return "<error>#{message}</error>"
end
+# -------------------
+# JSON
+# -------------------
+
macro error_json(*args)
- error_json_helper(env, locale, {{*args}})
+ error_json_helper(env, {{args.splat}})
end
-def error_json_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, exception : Exception, additional_fields : Hash(String, Object) | Nil)
+def error_json_helper(
+ env : HTTP::Server::Context,
+ status_code : Int32,
+ exception : Exception,
+ additional_fields : Hash(String, Object) | Nil = nil
+)
if exception.is_a?(InfoException)
- return error_json_helper(env, locale, status_code, exception.message || "", additional_fields)
+ return error_json_helper(env, status_code, exception.message || "", additional_fields)
end
+
env.response.content_type = "application/json"
env.response.status_code = status_code
+
error_message = {"error" => exception.message, "errorBacktrace" => exception.inspect_with_backtrace}
+
if additional_fields
error_message = error_message.merge(additional_fields)
end
- return error_message.to_json
-end
-def error_json_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, exception : Exception)
- return error_json_helper(env, locale, status_code, exception, nil)
+ return error_message.to_json
end
-def error_json_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, message : String, additional_fields : Hash(String, Object) | Nil)
+def error_json_helper(
+ env : HTTP::Server::Context,
+ status_code : Int32,
+ message : String,
+ additional_fields : Hash(String, Object) | Nil = nil
+)
env.response.content_type = "application/json"
env.response.status_code = status_code
+
error_message = {"error" => message}
+
if additional_fields
error_message = error_message.merge(additional_fields)
end
+
return error_message.to_json
end
-def error_json_helper(env : HTTP::Server::Context, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, message : String)
- error_json_helper(env, locale, status_code, message, nil)
+# -------------------
+# Redirect
+# -------------------
+
+def error_redirect_helper(env : HTTP::Server::Context)
+ request_path = env.request.path
+
+ locale = env.get("preferences").as(Preferences).locale
+
+ if request_path.starts_with?("/search") || request_path.starts_with?("/watch") ||
+ request_path.starts_with?("/channel") || request_path.starts_with?("/playlist?list=PL")
+ next_steps_text = translate(locale, "next_steps_error_message")
+ refresh = translate(locale, "next_steps_error_message_refresh")
+ go_to_youtube = translate(locale, "next_steps_error_message_go_to_youtube")
+ switch_instance = translate(locale, "Switch Invidious Instance")
+
+ return <<-END_HTML
+ <p style="margin-bottom: 4px;">#{next_steps_text}</p>
+ <ul>
+ <li>
+ <a href="#{env.request.resource}">#{refresh}</a>
+ </li>
+ <li>
+ <a href="/redirect?referer=#{env.get("current_page")}">#{switch_instance}</a>
+ </li>
+ <li>
+ <a rel="noreferrer noopener" href="https://youtube.com#{env.request.resource}">#{go_to_youtube}</a>
+ </li>
+ </ul>
+ END_HTML
+ else
+ return ""
+ end
end
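Since the helpers now read the locale from `env` themselves, call sites drop the `locale` argument. A hedged sketch of what a route might look like after this change; `id` and `get_video` stand for the surrounding handler code.

    # Illustrative only: error handling in a route after this refactor.
    begin
      video = get_video(id)
    rescue ex : InfoException
      return error_template(404, ex) # expands to error_template_helper(env, 404, ex)
    rescue ex
      return error_json(500, ex)     # JSON variant used by API routes
    end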
diff --git a/src/invidious/helpers/handlers.cr b/src/invidious/helpers/handlers.cr
index 045b6701..f3e3b951 100644
--- a/src/invidious/helpers/handlers.cr
+++ b/src/invidious/helpers/handlers.cr
@@ -97,18 +97,18 @@ class AuthHandler < Kemal::Handler
if token = env.request.headers["Authorization"]?
token = JSON.parse(URI.decode_www_form(token.lchop("Bearer ")))
session = URI.decode_www_form(token["session"].as_s)
- scopes, expire, signature = validate_request(token, session, env.request, HMAC_KEY, PG_DB, nil)
+ scopes, _, _ = validate_request(token, session, env.request, HMAC_KEY, nil)
- if email = PG_DB.query_one?("SELECT email FROM session_ids WHERE id = $1", session, as: String)
- user = PG_DB.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
+ if email = Invidious::Database::SessionIDs.select_email(session)
+ user = Invidious::Database::Users.select!(email: email)
end
elsif sid = env.request.cookies["SID"]?.try &.value
if sid.starts_with? "v1:"
raise "Cannot use token as SID"
end
- if email = PG_DB.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
- user = PG_DB.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
+ if email = Invidious::Database::SessionIDs.select_email(sid)
+ user = Invidious::Database::Users.select!(email: email)
end
scopes = [":*"]
@@ -142,63 +142,8 @@ class APIHandler < Kemal::Handler
exclude ["/api/v1/auth/notifications"], "POST"
def call(env)
- return call_next env unless only_match? env
-
- env.response.headers["Access-Control-Allow-Origin"] = "*"
-
- # Since /api/v1/notifications is an event-stream, we don't want
- # to wrap the response
- return call_next env if exclude_match? env
-
- # Here we swap out the socket IO so we can modify the response as needed
- output = env.response.output
- env.response.output = IO::Memory.new
-
- begin
- call_next env
-
- env.response.output.rewind
-
- if env.response.output.as(IO::Memory).size != 0 &&
- env.response.headers.includes_word?("Content-Type", "application/json")
- response = JSON.parse(env.response.output)
-
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
-
- if env.params.query["pretty"]?.try &.== "1"
- response = response.to_pretty_json
- else
- response = response.to_json
- end
- else
- response = env.response.output.gets_to_end
- end
- rescue ex
- env.response.content_type = "application/json" if env.response.headers.includes_word?("Content-Type", "text/html")
- env.response.status_code = 500
-
- if env.response.headers.includes_word?("Content-Type", "application/json")
- response = {"error" => ex.message || "Unspecified error"}
-
- if env.params.query["pretty"]?.try &.== "1"
- response = response.to_pretty_json
- else
- response = response.to_json
- end
- end
- ensure
- env.response.output = output
- env.response.print response
-
- env.response.flush
- end
+ env.response.headers["Access-Control-Allow-Origin"] = "*" if only_match?(env)
+ call_next env
end
end
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index 6a5789a0..6add0237 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -22,216 +22,6 @@ struct Annotation
property annotations : String
end
-struct ConfigPreferences
- include YAML::Serializable
-
- property annotations : Bool = false
- property annotations_subscribed : Bool = false
- property autoplay : Bool = false
- property captions : Array(String) = ["", "", ""]
- property comments : Array(String) = ["youtube", ""]
- property continue : Bool = false
- property continue_autoplay : Bool = true
- property dark_mode : String = ""
- property latest_only : Bool = false
- property listen : Bool = false
- property local : Bool = false
- property locale : String = "en-US"
- property max_results : Int32 = 40
- property notifications_only : Bool = false
- property player_style : String = "invidious"
- property quality : String = "hd720"
- property quality_dash : String = "auto"
- property default_home : String? = "Popular"
- property feed_menu : Array(String) = ["Popular", "Trending", "Subscriptions", "Playlists"]
- property related_videos : Bool = true
- property sort : String = "published"
- property speed : Float32 = 1.0_f32
- property thin_mode : Bool = false
- property unseen_only : Bool = false
- property video_loop : Bool = false
- property extend_desc : Bool = false
- property volume : Int32 = 100
-
- def to_tuple
- {% begin %}
- {
- {{*@type.instance_vars.map { |var| "#{var.name}: #{var.name}".id }}}
- }
- {% end %}
- end
-end
-
-class Config
- include YAML::Serializable
-
- property channel_threads : Int32 = 1 # Number of threads to use for crawling videos from channels (for updating subscriptions)
- property feed_threads : Int32 = 1 # Number of threads to use for updating feeds
- property output : String = "STDOUT" # Log file path or STDOUT
- property log_level : LogLevel = LogLevel::Info # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
- property db : DBConfig? = nil # Database configuration with separate parameters (username, hostname, etc)
-
- @[YAML::Field(converter: Preferences::URIConverter)]
- property database_url : URI = URI.parse("") # Database configuration using 12-Factor "Database URL" syntax
- property decrypt_polling : Bool = true # Use polling to keep decryption function up to date
- property full_refresh : Bool = false # Used for crawling channels: threads should check all videos uploaded by a channel
- property https_only : Bool? # Used to tell Invidious it is behind a proxy, so links to resources should be https://
- property hmac_key : String? # HMAC signing key for CSRF tokens and verifying pubsub subscriptions
- property domain : String? # Domain to be used for links to resources on the site where an absolute URL is required
- property use_pubsub_feeds : Bool | Int32 = false # Subscribe to channels using PubSubHubbub (requires domain, hmac_key)
- property popular_enabled : Bool = true
- property captcha_enabled : Bool = true
- property login_enabled : Bool = true
- property registration_enabled : Bool = true
- property statistics_enabled : Bool = false
- property admins : Array(String) = [] of String
- property external_port : Int32? = nil
- property default_user_preferences : ConfigPreferences = ConfigPreferences.from_yaml("")
- property dmca_content : Array(String) = [] of String # For compliance with DMCA, disables download widget using list of video IDs
- property check_tables : Bool = false # Check table integrity, automatically try to add any missing columns, create tables, etc.
- property cache_annotations : Bool = false # Cache annotations requested from IA, will not cache empty annotations or annotations that only contain cards
- property banner : String? = nil # Optional banner to be displayed along top of page for announcements, etc.
- property hsts : Bool? = true # Enables 'Strict-Transport-Security'. Ensure that `domain` and all subdomains are served securely
- property disable_proxy : Bool? | Array(String)? = false # Disable proxying server-wide: options: 'dash', 'livestreams', 'downloads', 'local'
-
- @[YAML::Field(converter: Preferences::FamilyConverter)]
- property force_resolve : Socket::Family = Socket::Family::UNSPEC # Connect to YouTube over 'ipv6', 'ipv4'. Will sometimes resolve fix issues with rate-limiting (see https://github.com/ytdl-org/youtube-dl/issues/21729)
- property port : Int32 = 3000 # Port to listen for connections (overrided by command line argument)
- property host_binding : String = "0.0.0.0" # Host to bind (overrided by command line argument)
- property bind_unix : String? = nil # Make Invidious listening on UNIX sockets - Example: /tmp/invidious.sock
- property pool_size : Int32 = 100 # Pool size for HTTP requests to youtube.com and ytimg.com (each domain has a separate pool of `pool_size`)
- property use_quic : Bool = true # Use quic transport for youtube api
-
- @[YAML::Field(converter: Preferences::StringToCookies)]
- property cookies : HTTP::Cookies = HTTP::Cookies.new # Saved cookies in "name1=value1; name2=value2..." format
- property captcha_key : String? = nil # Key for Anti-Captcha
- property captcha_api_url : String = "https://api.anti-captcha.com" # API URL for Anti-Captcha
-
- def disabled?(option)
- case disabled = CONFIG.disable_proxy
- when Bool
- return disabled
- when Array
- if disabled.includes? option
- return true
- else
- return false
- end
- else
- return false
- end
- end
-
- def self.load
- # Load config from file or YAML string env var
- env_config_file = "INVIDIOUS_CONFIG_FILE"
- env_config_yaml = "INVIDIOUS_CONFIG"
-
- config_file = ENV.has_key?(env_config_file) ? ENV.fetch(env_config_file) : "config/config.yml"
- config_yaml = ENV.has_key?(env_config_yaml) ? ENV.fetch(env_config_yaml) : File.read(config_file)
-
- config = Config.from_yaml(config_yaml)
-
- # Update config from env vars (upcased and prefixed with "INVIDIOUS_")
- {% for ivar in Config.instance_vars %}
- {% env_id = "INVIDIOUS_#{ivar.id.upcase}" %}
-
- if ENV.has_key?({{env_id}})
- # puts %(Config.{{ivar.id}} : Loading from env var {{env_id}})
- env_value = ENV.fetch({{env_id}})
- success = false
-
- # Use YAML converter if specified
- {% ann = ivar.annotation(::YAML::Field) %}
- {% if ann && ann[:converter] %}
- puts %(Config.{{ivar.id}} : Parsing "#{env_value}" as {{ivar.type}} with {{ann[:converter]}} converter)
- config.{{ivar.id}} = {{ann[:converter]}}.from_yaml(YAML::ParseContext.new, YAML::Nodes.parse(ENV.fetch({{env_id}})).nodes[0])
- puts %(Config.{{ivar.id}} : Set to #{config.{{ivar.id}}})
- success = true
-
- # Use regular YAML parser otherwise
- {% else %}
- {% ivar_types = ivar.type.union? ? ivar.type.union_types : [ivar.type] %}
- # Sort types to avoid parsing nulls and numbers as strings
- {% ivar_types = ivar_types.sort_by { |ivar_type| ivar_type == Nil ? 0 : ivar_type == Int32 ? 1 : 2 } %}
- {{ivar_types}}.each do |ivar_type|
- if !success
- begin
- # puts %(Config.{{ivar.id}} : Trying to parse "#{env_value}" as #{ivar_type})
- config.{{ivar.id}} = ivar_type.from_yaml(env_value)
- puts %(Config.{{ivar.id}} : Set to #{config.{{ivar.id}}} (#{ivar_type}))
- success = true
- rescue
- # nop
- end
- end
- end
- {% end %}
-
- # Exit on fail
- if !success
- puts %(Config.{{ivar.id}} failed to parse #{env_value} as {{ivar.type}})
- exit(1)
- end
- end
- {% end %}
-
- # Build database_url from db.* if it's not set directly
- if config.database_url.to_s.empty?
- if db = config.db
- config.database_url = URI.new(
- scheme: "postgres",
- user: db.user,
- password: db.password,
- host: db.host,
- port: db.port,
- path: db.dbname,
- )
- else
- puts "Config : Either database_url or db.* is required"
- exit(1)
- end
- end
-
- return config
- end
-end
-
-struct DBConfig
- include YAML::Serializable
-
- property user : String
- property password : String
- property host : String
- property port : Int32
- property dbname : String
-end
-
-def login_req(f_req)
- data = {
- # Unfortunately there's not much information available on `bgRequest`; part of Google's BotGuard
- # Generally this is much longer (>1250 characters), see also
- # https://github.com/ytdl-org/youtube-dl/commit/baf67a604d912722b0fe03a40e9dc5349a2208cb .
- # For now this can be empty.
- "bgRequest" => %|["identifier",""]|,
- "pstMsg" => "1",
- "checkConnection" => "youtube",
- "checkedDomains" => "youtube",
- "hl" => "en",
- "deviceinfo" => %|[null,null,null,[],null,"US",null,null,[],"GlifWebSignIn",null,[null,null,[]]]|,
- "f.req" => f_req,
- "flowName" => "GlifWebSignIn",
- "flowEntry" => "ServiceLogin",
- # "cookiesDisabled" => "false",
- # "gmscoreversion" => "undefined",
- # "continue" => "https://accounts.google.com/ManageAccount",
- # "azt" => "",
- # "bgHash" => "",
- }
-
- return HTTP::Params.encode(data)
-end
-
def html_to_content(description_html : String)
description = description_html.gsub(/(<br>)|(<br\/>)/, {
"<br>": "\n",
@@ -245,287 +35,7 @@ def html_to_content(description_html : String)
return description
end
-def extract_videos(initial_data : Hash(String, JSON::Any), author_fallback : String? = nil, author_id_fallback : String? = nil)
- extract_items(initial_data, author_fallback, author_id_fallback).select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
-end
-
-def extract_item(item : JSON::Any, author_fallback : String? = nil, author_id_fallback : String? = nil)
- if i = (item["videoRenderer"]? || item["gridVideoRenderer"]?)
- video_id = i["videoId"].as_s
- title = i["title"].try { |t| t["simpleText"]?.try &.as_s || t["runs"]?.try &.as_a.map(&.["text"].as_s).join("") } || ""
-
- author_info = i["ownerText"]?.try &.["runs"]?.try &.as_a?.try &.[0]?
- author = author_info.try &.["text"].as_s || author_fallback || ""
- author_id = author_info.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]["browseId"].as_s || author_id_fallback || ""
-
- published = i["publishedTimeText"]?.try &.["simpleText"]?.try { |t| decode_date(t.as_s) } || Time.local
- view_count = i["viewCountText"]?.try &.["simpleText"]?.try &.as_s.gsub(/\D+/, "").to_i64? || 0_i64
- description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
- length_seconds = i["lengthText"]?.try &.["simpleText"]?.try &.as_s.try { |t| decode_length_seconds(t) } ||
- i["thumbnailOverlays"]?.try &.as_a.find(&.["thumbnailOverlayTimeStatusRenderer"]?).try &.["thumbnailOverlayTimeStatusRenderer"]?
- .try &.["text"]?.try &.["simpleText"]?.try &.as_s.try { |t| decode_length_seconds(t) } || 0
-
- live_now = false
- paid = false
- premium = false
-
- premiere_timestamp = i["upcomingEventData"]?.try &.["startTime"]?.try { |t| Time.unix(t.as_s.to_i64) }
-
- i["badges"]?.try &.as_a.each do |badge|
- b = badge["metadataBadgeRenderer"]
- case b["label"].as_s
- when "LIVE NOW"
- live_now = true
- when "New", "4K", "CC"
- # TODO
- when "Premium"
- paid = true
-
- # TODO: Potentially available as i["topStandaloneBadge"]["metadataBadgeRenderer"]
- premium = true
- else nil # Ignore
- end
- end
-
- SearchVideo.new({
- title: title,
- id: video_id,
- author: author,
- ucid: author_id,
- published: published,
- views: view_count,
- description_html: description_html,
- length_seconds: length_seconds,
- live_now: live_now,
- paid: paid,
- premium: premium,
- premiere_timestamp: premiere_timestamp,
- })
- elsif i = item["channelRenderer"]?
- author = i["title"]["simpleText"]?.try &.as_s || author_fallback || ""
- author_id = i["channelId"]?.try &.as_s || author_id_fallback || ""
-
- author_thumbnail = i["thumbnail"]["thumbnails"]?.try &.as_a[0]?.try &.["url"]?.try &.as_s || ""
- subscriber_count = i["subscriberCountText"]?.try &.["simpleText"]?.try &.as_s.try { |s| short_text_to_number(s.split(" ")[0]) } || 0
-
- auto_generated = false
- auto_generated = true if !i["videoCountText"]?
- video_count = i["videoCountText"]?.try &.["runs"].as_a[0]?.try &.["text"].as_s.gsub(/\D/, "").to_i || 0
- description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
-
- SearchChannel.new({
- author: author,
- ucid: author_id,
- author_thumbnail: author_thumbnail,
- subscriber_count: subscriber_count,
- video_count: video_count,
- description_html: description_html,
- auto_generated: auto_generated,
- })
- elsif i = item["gridPlaylistRenderer"]?
- title = i["title"]["runs"].as_a[0]?.try &.["text"].as_s || ""
- plid = i["playlistId"]?.try &.as_s || ""
-
- video_count = i["videoCountText"]["runs"].as_a[0]?.try &.["text"].as_s.gsub(/\D/, "").to_i || 0
- playlist_thumbnail = i["thumbnail"]["thumbnails"][0]?.try &.["url"]?.try &.as_s || ""
-
- SearchPlaylist.new({
- title: title,
- id: plid,
- author: author_fallback || "",
- ucid: author_id_fallback || "",
- video_count: video_count,
- videos: [] of SearchPlaylistVideo,
- thumbnail: playlist_thumbnail,
- })
- elsif i = item["playlistRenderer"]?
- title = i["title"]["simpleText"]?.try &.as_s || ""
- plid = i["playlistId"]?.try &.as_s || ""
-
- video_count = i["videoCount"]?.try &.as_s.to_i || 0
- playlist_thumbnail = i["thumbnails"].as_a[0]?.try &.["thumbnails"]?.try &.as_a[0]?.try &.["url"].as_s || ""
-
- author_info = i["shortBylineText"]?.try &.["runs"]?.try &.as_a?.try &.[0]?
- author = author_info.try &.["text"].as_s || author_fallback || ""
- author_id = author_info.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]["browseId"].as_s || author_id_fallback || ""
-
- videos = i["videos"]?.try &.as_a.map do |v|
- v = v["childVideoRenderer"]
- v_title = v["title"]["simpleText"]?.try &.as_s || ""
- v_id = v["videoId"]?.try &.as_s || ""
- v_length_seconds = v["lengthText"]?.try &.["simpleText"]?.try { |t| decode_length_seconds(t.as_s) } || 0
- SearchPlaylistVideo.new({
- title: v_title,
- id: v_id,
- length_seconds: v_length_seconds,
- })
- end || [] of SearchPlaylistVideo
-
- # TODO: i["publishedTimeText"]?
-
- SearchPlaylist.new({
- title: title,
- id: plid,
- author: author,
- ucid: author_id,
- video_count: video_count,
- videos: videos,
- thumbnail: playlist_thumbnail,
- })
- elsif i = item["radioRenderer"]? # Mix
- # TODO
- elsif i = item["showRenderer"]? # Show
- # TODO
- elsif i = item["shelfRenderer"]?
- elsif i = item["horizontalCardListRenderer"]?
- elsif i = item["searchPyvRenderer"]? # Ad
- end
-end
-
-def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : String? = nil, author_id_fallback : String? = nil)
- items = [] of SearchItem
-
- channel_v2_response = initial_data
- .try &.["continuationContents"]?
- .try &.["gridContinuation"]?
- .try &.["items"]?
-
- if channel_v2_response
- channel_v2_response.try &.as_a.each { |item|
- extract_item(item, author_fallback, author_id_fallback)
- .try { |t| items << t }
- }
- else
- initial_data.try { |t| t["contents"]? || t["response"]? }
- .try { |t| t["twoColumnBrowseResultsRenderer"]?.try &.["tabs"].as_a.select(&.["tabRenderer"]?.try &.["selected"].as_bool)[0]?.try &.["tabRenderer"]["content"] ||
- t["twoColumnSearchResultsRenderer"]?.try &.["primaryContents"] ||
- t["continuationContents"]? }
- .try { |t| t["sectionListRenderer"]? || t["sectionListContinuation"]? }
- .try &.["contents"].as_a
- .each { |c| c.try &.["itemSectionRenderer"]?.try &.["contents"].as_a
- .try { |t| t[0]?.try &.["shelfRenderer"]?.try &.["content"]["expandedShelfContentsRenderer"]?.try &.["items"].as_a ||
- t[0]?.try &.["gridRenderer"]?.try &.["items"].as_a || t }
- .each { |item|
- extract_item(item, author_fallback, author_id_fallback)
- .try { |t| items << t }
- } }
- end
-
- items
-end
-
-def check_enum(db, enum_name, struct_type = nil)
- return # TODO
-
- if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
- LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
-
- db.using_connection do |conn|
- conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
- end
- end
-end
-
-def check_table(db, table_name, struct_type = nil)
- # Create table if it doesn't exist
- begin
- db.exec("SELECT * FROM #{table_name} LIMIT 0")
- rescue ex
- LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
-
- db.using_connection do |conn|
- conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
- end
- end
-
- return if !struct_type
-
- struct_array = struct_type.type_array
- column_array = get_column_array(db, table_name)
- column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
- .try &.["types"].split(",").map { |line| line.strip }.reject &.starts_with?("CONSTRAINT")
-
- return if !column_types
-
- struct_array.each_with_index do |name, i|
- if name != column_array[i]?
- if !column_array[i]?
- new_column = column_types.select { |line| line.starts_with? name }[0]
- LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
- db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
- next
- end
-
- # Column doesn't exist
- if !column_array.includes? name
- new_column = column_types.select { |line| line.starts_with? name }[0]
- db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
- end
-
- # Column exists but in the wrong position, rotate
- if struct_array.includes? column_array[i]
- until name == column_array[i]
- new_column = column_types.select { |line| line.starts_with? column_array[i] }[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
-
- # There's a column we didn't expect
- if !new_column
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
- db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-
- column_array = get_column_array(db, table_name)
- next
- end
-
- LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
- db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-
- LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
- db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
-
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
- db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-
- LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
- db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
-
- column_array = get_column_array(db, table_name)
- end
- else
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
- db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
- end
- end
- end
-
- return if column_array.size <= struct_array.size
-
- column_array.each do |column|
- if !struct_array.includes? column
- LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
- db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
- end
- end
-end
-
-class PG::ResultSet
- def field(index = @column_index)
- @fields.not_nil![index]
- end
-end
-
-def get_column_array(db, table_name)
- column_array = [] of String
- db.query("SELECT * FROM #{table_name} LIMIT 0") do |rs|
- rs.column_count.times do |i|
- column = rs.as(PG::ResultSet).field(i)
- column_array << column.name
- end
- end
-
- return column_array
-end
-
-def cache_annotation(db, id, annotations)
+def cache_annotation(id, annotations)
if !CONFIG.cache_annotations
return
end
@@ -543,14 +53,14 @@ def cache_annotation(db, id, annotations)
end
end
- db.exec("INSERT INTO annotations VALUES ($1, $2) ON CONFLICT DO NOTHING", id, annotations) if has_legacy_annotations
+ Invidious::Database::Annotations.insert(id, annotations) if has_legacy_annotations
end
def create_notification_stream(env, topics, connection_channel)
connection = Channel(PQ::Notification).new(8)
connection_channel.send({true, connection})
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ locale = env.get("preferences").as(Preferences).locale
since = env.params.query["since"]?.try &.to_i?
id = 0
@@ -564,18 +74,9 @@ def create_notification_stream(env, topics, connection_channel)
published = Time.utc - Time::Span.new(days: time_span[0], hours: time_span[1], minutes: time_span[2], seconds: time_span[3])
video_id = TEST_IDS[rand(TEST_IDS.size)]
- video = get_video(video_id, PG_DB)
+ video = get_video(video_id)
video.published = published
- response = JSON.parse(video.to_json(locale))
-
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
+ response = JSON.parse(video.to_json(locale, nil))
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
@@ -595,22 +96,14 @@ def create_notification_stream(env, topics, connection_channel)
spawn do
begin
if since
+ since_unix = Time.unix(since.not_nil!)
+
topics.try &.each do |topic|
case topic
when .match(/UC[A-Za-z0-9_-]{22}/)
- PG_DB.query_all("SELECT * FROM channel_videos WHERE ucid = $1 AND published > $2 ORDER BY published DESC LIMIT 15",
- topic, Time.unix(since.not_nil!), as: ChannelVideo).each do |video|
+ Invidious::Database::ChannelVideos.select_notfications(topic, since_unix).each do |video|
response = JSON.parse(video.to_json(locale))
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
-
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
env.response.puts
@@ -640,18 +133,9 @@ def create_notification_stream(env, topics, connection_channel)
next
end
- video = get_video(video_id, PG_DB)
+ video = get_video(video_id)
video.published = Time.unix(published)
- response = JSON.parse(video.to_json(locale))
-
- if fields_text = env.params.query["fields"]?
- begin
- JSONFilter.filter(response, fields_text)
- rescue ex
- env.response.status_code = 400
- response = {"error" => ex.message}
- end
- end
+ response = JSON.parse(video.to_json(locale, nil))
env.response.puts "id: #{id}"
env.response.puts "data: #{response.to_json}"
@@ -698,84 +182,19 @@ def proxy_file(response, env)
end
end
-# See https://github.com/kemalcr/kemal/pull/576
-class HTTP::Server::Response::Output
- def close
- return if closed?
-
- unless response.wrote_headers?
- response.content_length = @out_count
- end
-
- ensure_headers_written
-
- super
-
- if @chunked
- @io << "0\r\n\r\n"
- @io.flush
- end
- end
-end
-
-class HTTP::Client::Response
- def pipe(io)
- HTTP.serialize_body(io, headers, @body, @body_io, @version)
- end
-end
-
-# Supports serialize_body without first writing headers
-module HTTP
- def self.serialize_body(io, headers, body, body_io, version)
- if body
- io << body
- elsif body_io
- content_length = content_length(headers)
- if content_length
- copied = IO.copy(body_io, io)
- if copied != content_length
- raise ArgumentError.new("Content-Length header is #{content_length} but body had #{copied} bytes")
- end
- elsif Client::Response.supports_chunked?(version)
- headers["Transfer-Encoding"] = "chunked"
- serialize_chunked_body(io, body_io)
- else
- io << body
- end
- end
- end
-end
-
-class HTTP::Client
- property family : Socket::Family = Socket::Family::UNSPEC
-
- private def socket
- socket = @socket
- return socket if socket
-
- hostname = @host.starts_with?('[') && @host.ends_with?(']') ? @host[1..-2] : @host
- socket = TCPSocket.new hostname, @port, @dns_timeout, @connect_timeout, @family
- socket.read_timeout = @read_timeout if @read_timeout
- socket.sync = false
-
- {% if !flag?(:without_openssl) %}
- if tls = @tls
- socket = OpenSSL::SSL::Socket::Client.new(socket, context: tls, sync_close: true, hostname: @host)
- end
- {% end %}
+# Fetch the playback requests tracker used by the statistics module.
+#
+# Creates a new tracker when unavailable.
+def get_playback_statistic
+ if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]) && tracker.as(Hash).empty?
+ tracker = {
+ "totalRequests" => 0_i64,
+ "successfulRequests" => 0_i64,
+ "ratio" => 0_f64,
+ }
- @socket = socket
+ Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"] = tracker
end
-end
-class TCPSocket
- def initialize(host, port, dns_timeout = nil, connect_timeout = nil, family = Socket::Family::UNSPEC)
- Addrinfo.tcp(host, port, timeout: dns_timeout, family: family) do |addrinfo|
- super(addrinfo.family, addrinfo.type, addrinfo.protocol)
- connect(addrinfo, timeout: connect_timeout) do |error|
- close
- error
- end
- end
- end
+ return tracker.as(Hash(String, Int64 | Float64))
end
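As a usage note, a hedged sketch of how a playback proxy route might update the shared tracker returned by `get_playback_statistic`; the `response` object is an assumption standing in for the proxied upstream response.

    # Illustrative only: update the playback statistics after proxying a request.
    tracker = get_playback_statistic()
    tracker["totalRequests"] = tracker["totalRequests"].as(Int64) + 1
    if response.status_code == 200
      tracker["successfulRequests"] = tracker["successfulRequests"].as(Int64) + 1
    end
    # Integer division with / returns a Float64 in Crystal.
    tracker["ratio"] = tracker["successfulRequests"].as(Int64) / tracker["totalRequests"].as(Int64)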
diff --git a/src/invidious/helpers/i18n.cr b/src/invidious/helpers/i18n.cr
index 45a3f1ae..1ba3ea61 100644
--- a/src/invidious/helpers/i18n.cr
+++ b/src/invidious/helpers/i18n.cr
@@ -1,72 +1,185 @@
-LOCALES = {
- "ar" => load_locale("ar"),
- "de" => load_locale("de"),
- "el" => load_locale("el"),
- "en-US" => load_locale("en-US"),
- "eo" => load_locale("eo"),
- "es" => load_locale("es"),
- "fa" => load_locale("fa"),
- "fi" => load_locale("fi"),
- "fr" => load_locale("fr"),
- "he" => load_locale("he"),
- "hr" => load_locale("hr"),
- "id" => load_locale("id"),
- "is" => load_locale("is"),
- "it" => load_locale("it"),
- "ja" => load_locale("ja"),
- "nb-NO" => load_locale("nb-NO"),
- "nl" => load_locale("nl"),
- "pl" => load_locale("pl"),
- "pt-BR" => load_locale("pt-BR"),
- "pt-PT" => load_locale("pt-PT"),
- "ro" => load_locale("ro"),
- "ru" => load_locale("ru"),
- "sv" => load_locale("sv-SE"),
- "tr" => load_locale("tr"),
- "uk" => load_locale("uk"),
- "zh-CN" => load_locale("zh-CN"),
- "zh-TW" => load_locale("zh-TW"),
+# Languages that need a better level of translation (at least 20%)
+# before they can be added to the list below:
+#
+# "af" => "", # Afrikaans
+# "az" => "", # Azerbaijani
+# "be" => "", # Belarusian
+# "bn_BD" => "", # Bengali (Bangladesh)
+# "ia" => "", # Interlingua
+# "or" => "", # Odia
+# "tk" => "", # Turkmen
+# "tok => "", # Toki Pona
+#
+LOCALES_LIST = {
+ "ar" => "العربية", # Arabic
+ "bg" => "български", # Bulgarian
+ "bn" => "বাংলা", # Bengali
+ "ca" => "Català", # Catalan
+ "cs" => "Čeština", # Czech
+ "cy" => "Cymraeg", # Welsh
+ "da" => "Dansk", # Danish
+ "de" => "Deutsch", # German
+ "el" => "Ελληνικά", # Greek
+ "en-US" => "English", # English
+ "eo" => "Esperanto", # Esperanto
+ "es" => "Español", # Spanish
+ "et" => "Eesti keel", # Estonian
+ "eu" => "Euskara", # Basque
+ "fa" => "فارسی", # Persian
+ "fi" => "Suomi", # Finnish
+ "fr" => "Français", # French
+ "he" => "עברית", # Hebrew
+ "hi" => "हिन्दी", # Hindi
+ "hr" => "Hrvatski", # Croatian
+ "hu-HU" => "Magyar Nyelv", # Hungarian
+ "id" => "Bahasa Indonesia", # Indonesian
+ "is" => "Íslenska", # Icelandic
+ "it" => "Italiano", # Italian
+ "ja" => "日本語", # Japanese
+ "ko" => "한국어", # Korean
+ "lmo" => "Lombard", # Lombard
+ "lt" => "Lietuvių", # Lithuanian
+ "nb-NO" => "Norsk bokmål", # Norwegian Bokmål
+ "nl" => "Nederlands", # Dutch
+ "pl" => "Polski", # Polish
+ "pt" => "Português", # Portuguese
+ "pt-BR" => "Português Brasileiro", # Portuguese (Brazil)
+ "pt-PT" => "Português de Portugal", # Portuguese (Portugal)
+ "ro" => "Română", # Romanian
+ "ru" => "Русский", # Russian
+ "si" => "සිංහල", # Sinhala
+ "sk" => "Slovenčina", # Slovak
+ "sl" => "Slovenščina", # Slovenian
+ "sq" => "Shqip", # Albanian
+ "sr" => "Srpski (latinica)", # Serbian (Latin)
+ "sr_Cyrl" => "Српски (ћирилица)", # Serbian (Cyrillic)
+ "sv-SE" => "Svenska", # Swedish
+ "tr" => "Türkçe", # Turkish
+ "uk" => "Українська", # Ukrainian
+ "vi" => "Tiếng Việt", # Vietnamese
+ "zh-CN" => "汉语", # Chinese (Simplified)
+ "zh-TW" => "漢語", # Chinese (Traditional)
}
-def load_locale(name)
- return JSON.parse(File.read("locales/#{name}.json")).as_h
+LOCALES = load_all_locales()
+
+CONTENT_REGIONS = {
+ "AE", "AR", "AT", "AU", "AZ", "BA", "BD", "BE", "BG", "BH", "BO", "BR", "BY",
+ "CA", "CH", "CL", "CO", "CR", "CY", "CZ", "DE", "DK", "DO", "DZ", "EC", "EE",
+ "EG", "ES", "FI", "FR", "GB", "GE", "GH", "GR", "GT", "HK", "HN", "HR", "HU",
+ "ID", "IE", "IL", "IN", "IQ", "IS", "IT", "JM", "JO", "JP", "KE", "KR", "KW",
+ "KZ", "LB", "LI", "LK", "LT", "LU", "LV", "LY", "MA", "ME", "MK", "MT", "MX",
+ "MY", "NG", "NI", "NL", "NO", "NP", "NZ", "OM", "PA", "PE", "PG", "PH", "PK",
+ "PL", "PR", "PT", "PY", "QA", "RO", "RS", "RU", "SA", "SE", "SG", "SI", "SK",
+ "SN", "SV", "TH", "TN", "TR", "TW", "TZ", "UA", "UG", "US", "UY", "VE", "VN",
+ "YE", "ZA", "ZW",
+}
+
+# Enum for the different types of number formats
+enum NumberFormatting
+ None # Print the number as-is
+ Separator # Use a separator for thousands
+ Short # Use short notation (k/M/B)
+ HtmlSpan # Surround with <span id="count"></span>
+end
+
+def load_all_locales
+ locales = {} of String => Hash(String, JSON::Any)
+
+ LOCALES_LIST.each_key do |name|
+ locales[name] = JSON.parse(File.read("locales/#{name}.json")).as_h
+ end
+
+ return locales
end
-def translate(locale : Hash(String, JSON::Any) | Nil, translation : String, text : String | Nil = nil)
- # if locale && !locale[translation]?
- # puts "Could not find translation for #{translation.dump}"
- # end
+def translate(locale : String?, key : String, text : String | Hash(String, String) | Nil = nil) : String
+ # Log a warning if "key" doesn't exist in en-US locale and return
+ # that key as the text, so this is more or less transparent to the user.
+ if !LOCALES["en-US"].has_key?(key)
+ LOGGER.warn("i18n: Missing translation key \"#{key}\"")
+ return key
+ end
+
+ # Default to English whenever the locale doesn't exist
+ # or the requested key has not been translated
+ if locale && LOCALES.has_key?(locale) && LOCALES[locale].has_key?(key)
+ raw_data = LOCALES[locale][key]
+ else
+ raw_data = LOCALES["en-US"][key]
+ end
- if locale && locale[translation]?
- case locale[translation]
- when .as_h?
- match_length = 0
+ case raw_data
+ when .as_h?
+ # Init
+ translation = ""
+ match_length = 0
- locale[translation].as_h.each do |key, value|
- if md = text.try &.match(/#{key}/)
+ raw_data.as_h.each do |hash_key, value|
+ if text.is_a?(String)
+ if md = text.try &.match(/#{hash_key}/)
if md[0].size >= match_length
translation = value.as_s
match_length = md[0].size
end
end
end
- when .as_s?
- if !locale[translation].as_s.empty?
- translation = locale[translation].as_s
- end
- else
- raise "Invalid translation #{translation}"
end
+ when .as_s?
+ translation = raw_data.as_s
+ else
+ raise "Invalid translation \"#{raw_data}\""
end
- if text
+ if text.is_a?(String)
translation = translation.gsub("`x`", text)
+ elsif text.is_a?(Hash(String, String))
+ # Adds support for multi-string interpolation, based on i18next: https://www.i18next.com/translation-function/interpolation#basic
+ text.each_key do |hash_key|
+ translation = translation.gsub("{{#{hash_key}}}", text[hash_key])
+ end
end
return translation
end
-def translate_bool(locale : Hash(String, JSON::Any) | Nil, translation : Bool)
+def translate_count(locale : String, key : String, count : Int, format = NumberFormatting::None) : String
+ # Fallback on english if locale doesn't exist
+ locale = "en-US" if !LOCALES.has_key?(locale)
+
+ # Retrieve suffix
+ suffix = I18next::Plurals::RESOLVER.get_suffix(locale, count)
+ plural_key = key + suffix
+
+ if LOCALES[locale].has_key?(plural_key)
+ translation = LOCALES[locale][plural_key].as_s
+ else
+ # Try #1: Fallback to singular in the same locale
+ singular_suffix = I18next::Plurals::RESOLVER.get_suffix(locale, 1)
+
+ if LOCALES[locale].has_key?(key + singular_suffix)
+ translation = LOCALES[locale][key + singular_suffix].as_s
+ elsif locale != "en-US"
+ # Try #2: Fallback to english
+ translation = translate_count("en-US", key, count)
+ else
+ # Return the key if we're already in English, as the translation is missing
+ LOGGER.warn("i18n: Missing translation key \"#{key}\"")
+ return key
+ end
+ end
+
+ case format
+ when .separator? then count_txt = number_with_separator(count)
+ when .short? then count_txt = number_to_short_text(count)
+ when .html_span? then count_txt = "<span id=\"count\">" + count.to_s + "</span>"
+ else count_txt = count.to_s
+ end
+
+ return translation.gsub("{{count}}", count_txt)
+end
+
+def translate_bool(locale : String?, translation : Bool)
case translation
when true
return translate(locale, "Yes")
@@ -74,3 +187,12 @@ def translate_bool(locale : Hash(String, JSON::Any) | Nil, translation : Bool)
return translate(locale, "No")
end
end
+
+def locale_is_rtl?(locale : String?)
+ # Fallback to en-US
+ return false if locale.nil?
+
+ # Arabic, Persian, Hebrew
+ # See https://en.wikipedia.org/wiki/Right-to-left_script#List_of_RTL_scripts
+ return {"ar", "fa", "he"}.includes? locale
+end
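A usage sketch of the reworked helpers above, with locale codes now passed as plain strings; the translation keys are illustrative and assume matching entries in the locale files:

    locale = "fr"

    # Single-value interpolation via the legacy `x` placeholder
    translate(locale, "Some key", "value")

    # Multi-value interpolation, i18next style ({{name}} placeholders)
    translate(locale, "Another key", {"channel" => "Example", "day" => "Monday"})

    # Pluralised key with a formatted count
    translate_count(locale, "generic_views_count", 1_234, NumberFormatting::Separator)

    # Right-to-left detection for templates
    locale_is_rtl?(locale) # => false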
diff --git a/src/invidious/helpers/i18next.cr b/src/invidious/helpers/i18next.cr
new file mode 100644
index 00000000..684e6d14
--- /dev/null
+++ b/src/invidious/helpers/i18next.cr
@@ -0,0 +1,566 @@
+# I18next-compatible implementation of plural forms
+#
+module I18next::Plurals
+ # -----------------------------------
+ # I18next plural forms definition
+ # -----------------------------------
+
+ enum PluralForms
+ # One singular, one plural forms
+ Single_gt_one = 1 # E.g: French
+ Single_not_one = 2 # E.g: English
+
+ # No plural forms (E.g: Azerbaijani)
+ None = 3
+
+ # One singular, two plural forms
+ Dual_Slavic = 4 # E.g: Russian
+
+ # Special cases (rules used by only one or two language(s))
+ Special_Arabic = 5
+ Special_Czech_Slovak = 6
+ Special_Polish_Kashubian = 7
+ Special_Welsh = 8
+ Special_Irish = 10
+ Special_Scottish_Gaelic = 11
+ Special_Icelandic = 12
+ Special_Javanese = 13
+ Special_Cornish = 14
+ Special_Lithuanian = 15
+ Special_Latvian = 16
+ Special_Macedonian = 17
+ Special_Mandinka = 18
+ Special_Maltese = 19
+ Special_Romanian = 20
+ Special_Slovenian = 21
+ Special_Hebrew = 22
+ Special_Odia = 23
+
+ # Mixed v3/v4 rules in Weblate
+ # `es`, `pt` and `pt-PT` don't seem to have been refreshed
+ # by Weblate yet, but I suspect it will happen one day.
+ # See: https://github.com/translate/translate/issues/4873
+ Special_French_Portuguese
+ Special_Hungarian_Serbian
+ Special_Spanish_Italian
+ end
+
+ private PLURAL_SETS = {
+ PluralForms::Single_gt_one => [
+ "ach", "ak", "am", "arn", "br", "fa", "fil", "gun", "ln", "mfe", "mg",
+ "mi", "oc", "pt-PT", "tg", "tl", "ti", "tr", "uz", "wa",
+ ],
+ PluralForms::Single_not_one => [
+ "af", "an", "ast", "az", "bg", "bn", "ca", "da", "de", "dev", "el", "en",
+ "eo", "et", "eu", "fi", "fo", "fur", "fy", "gl", "gu", "ha", "hi",
+ "hu", "hy", "ia", "kk", "kn", "ku", "lb", "mai", "ml", "mn", "mr",
+ "nah", "nap", "nb", "ne", "nl", "nn", "no", "nso", "pa", "pap", "pms",
+ "ps", "rm", "sco", "se", "si", "so", "son", "sq", "sv", "sw",
+ "ta", "te", "tk", "ur", "yo",
+ ],
+ PluralForms::None => [
+ "ay", "bo", "cgg", "ht", "id", "ja", "jbo", "ka", "km", "ko", "ky",
+ "lo", "ms", "sah", "su", "th", "tt", "ug", "vi", "wo", "zh",
+ ],
+ PluralForms::Dual_Slavic => [
+ "be", "bs", "cnr", "dz", "ru", "uk",
+ ],
+ }
+
+ private PLURAL_SINGLES = {
+ "ar" => PluralForms::Special_Arabic,
+ "cs" => PluralForms::Special_Czech_Slovak,
+ "csb" => PluralForms::Special_Polish_Kashubian,
+ "cy" => PluralForms::Special_Welsh,
+ "ga" => PluralForms::Special_Irish,
+ "gd" => PluralForms::Special_Scottish_Gaelic,
+ "he" => PluralForms::Special_Hebrew,
+ "is" => PluralForms::Special_Icelandic,
+ "iw" => PluralForms::Special_Hebrew,
+ "jv" => PluralForms::Special_Javanese,
+ "kw" => PluralForms::Special_Cornish,
+ "lt" => PluralForms::Special_Lithuanian,
+ "lv" => PluralForms::Special_Latvian,
+ "mk" => PluralForms::Special_Macedonian,
+ "mnk" => PluralForms::Special_Mandinka,
+ "mt" => PluralForms::Special_Maltese,
+ "or" => PluralForms::Special_Odia,
+ "pl" => PluralForms::Special_Polish_Kashubian,
+ "ro" => PluralForms::Special_Romanian,
+ "sk" => PluralForms::Special_Czech_Slovak,
+ "sl" => PluralForms::Special_Slovenian,
+ # Mixed v3/v4 rules
+ "es" => PluralForms::Special_Spanish_Italian,
+ "fr" => PluralForms::Special_French_Portuguese,
+ "hr" => PluralForms::Special_Hungarian_Serbian,
+ "it" => PluralForms::Special_Spanish_Italian,
+ "pt" => PluralForms::Special_French_Portuguese,
+ "sr" => PluralForms::Special_Hungarian_Serbian,
+ }
+
+ # These are the v1 and v2 compatible suffixes.
+ # The array indices match the PluralForms enum above.
+ private NUMBERS = [
+ [1, 2], # 1
+ [1, 2], # 2
+ [1], # 3
+ [1, 2, 5], # 4
+ [0, 1, 2, 3, 11, 100], # 5
+ [1, 2, 5], # 6
+ [1, 2, 5], # 7
+ [1, 2, 3, 8], # 8
+ [1, 2], # 9 (not used)
+ [1, 2, 3, 7, 11], # 10
+ [1, 2, 3, 20], # 11
+ [1, 2], # 12
+ [0, 1], # 13
+ [1, 2, 3, 4], # 14
+ [1, 2, 10], # 15
+ [1, 2, 0], # 16
+ [1, 2], # 17
+ [0, 1, 2], # 18
+ [1, 2, 11, 20], # 19
+ [1, 2, 20], # 20
+ [5, 1, 2, 3], # 21
+ [1, 2, 20, 21], # 22
+ [2, 1], # 23 (Odia)
+ ]
+
+ # -----------------------------------
+ # I18next plural resolver class
+ # -----------------------------------
+
+ RESOLVER = Resolver.new
+
+ class Resolver
+ private property forms = {} of String => PluralForms
+ property version : UInt8 = 3
+
+ # Options
+ property simplify_plural_suffix : Bool = true
+
+ def initialize(version : Int = 3)
+ # Sanity checks
+ # V4 isn't supported, as it requires a full CLDR database.
+ if version > 4 || version == 0
+ raise "Invalid i18next version: v#{version}."
+ elsif version == 4
+ # Logger.error("Unsupported i18next version: v4. Falling back to v3")
+ @version = 3_u8
+ else
+ @version = version.to_u8
+ end
+
+ self.init_rules
+ end
+
+ def init_rules
+ # Look into sets
+ PLURAL_SETS.each do |form, langs|
+ langs.each { |lang| self.forms[lang] = form }
+ end
+
+ # Add plurals from the "singles" set
+ self.forms.merge!(PLURAL_SINGLES)
+ end
+
+ def get_plural_form(locale : String) : PluralForms
+ # Extract the ISO 639-1 or 639-2 code from an RFC 5646 language code
+ if !locale.matches?(/^pt-PT$/)
+ locale = locale.split('-')[0]
+ end
+
+ return self.forms[locale] if self.forms[locale]?
+
+ # If nothing was found, use the most common form, i.e.
+ # one singular and one plural, as in English. Not perfect,
+ # but better than throwing an exception at the user.
+ return PluralForms::Single_not_one
+ end
+
+ def get_suffix(locale : String, count : Int) : String
+ # Checked count must be absolute. In i18next, `rule.noAbs` is used to
+ # determine if comparison should be done on a signed or unsigned integer,
+ # but this variable is never set, resulting in the comparison always
+ # being done on absolute numbers.
+ return get_suffix_retrocompat(locale, count.abs)
+ end
+
+ # Emulate the `rule.numbers.size == 2 && rule.numbers[0] == 1` check
+ # from original i18next code
+ private def simple_plural?(form : PluralForms) : Bool
+ case form
+ when .single_gt_one? then return true
+ when .single_not_one? then return true
+ when .special_icelandic? then return true
+ when .special_macedonian? then return true
+ else
+ return false
+ end
+ end
+
+ private def get_suffix_retrocompat(locale : String, count : Int) : String
+ # Get plural form
+ plural_form = get_plural_form(locale)
+
+ # Languages with no plural have the "_0" suffix
+ return "_0" if plural_form.none?
+
+ # Get the index and suffix for this number
+ idx = SuffixIndex.get_index(plural_form, count)
+
+ # Simple plurals are handled differently in all versions (but v4)
+ if @simplify_plural_suffix && simple_plural?(plural_form)
+ return (idx == 1) ? "_plural" : ""
+ end
+
+ # More complex plurals
+ # TODO: support v1 and v2
+ # TODO: support `options.prepend` (v2 and v3)
+ # this.options.prepend && suffix.toString() ? this.options.prepend + suffix.toString() : suffix.toString()
+ #
+ # case @version
+ # when 1
+ # suffix = SUFFIXES_V1_V2[plural_form.to_i][idx]
+ # return (suffix == 1) ? "" : return "_plural_#{suffix}"
+ # when 2
+ # return "_#{suffix}"
+ # else # v3
+ return "_#{idx}"
+ # end
+ end
+ end
+
+ # -----------------------------
+ # Plural functions
+ # -----------------------------
+
+ module SuffixIndex
+ def self.get_index(plural_form : PluralForms, count : Int) : UInt8
+ case plural_form
+ when .single_gt_one? then return (count > 1) ? 1_u8 : 0_u8
+ when .single_not_one? then return (count != 1) ? 1_u8 : 0_u8
+ when .none? then return 0_u8
+ when .dual_slavic? then return dual_slavic(count)
+ when .special_arabic? then return special_arabic(count)
+ when .special_czech_slovak? then return special_czech_slovak(count)
+ when .special_polish_kashubian? then return special_polish_kashubian(count)
+ when .special_welsh? then return special_welsh(count)
+ when .special_irish? then return special_irish(count)
+ when .special_scottish_gaelic? then return special_scottish_gaelic(count)
+ when .special_icelandic? then return special_icelandic(count)
+ when .special_javanese? then return special_javanese(count)
+ when .special_cornish? then return special_cornish(count)
+ when .special_lithuanian? then return special_lithuanian(count)
+ when .special_latvian? then return special_latvian(count)
+ when .special_macedonian? then return special_macedonian(count)
+ when .special_mandinka? then return special_mandinka(count)
+ when .special_maltese? then return special_maltese(count)
+ when .special_romanian? then return special_romanian(count)
+ when .special_slovenian? then return special_slovenian(count)
+ when .special_hebrew? then return special_hebrew(count)
+ when .special_odia? then return special_odia(count)
+ # Mixed v3/v4 forms
+ when .special_spanish_italian? then return special_cldr_spanish_italian(count)
+ when .special_french_portuguese? then return special_cldr_french_portuguese(count)
+ when .special_hungarian_serbian? then return special_cldr_hungarian_serbian(count)
+ else
+ # default, if nothing matched above
+ return 0_u8
+ end
+ end
+
+ # Plural form of Slavic languages (E.g: Russian)
+ #
+ # Corresponds to i18next rule #4
+ # Rule: (n%10==1 && n%100!=11 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)
+ #
+ def self.dual_slavic(count : Int) : UInt8
+ n_mod_10 = count % 10
+ n_mod_100 = count % 100
+
+ if n_mod_10 == 1 && n_mod_100 != 11
+ return 0_u8
+ elsif n_mod_10 >= 2 && n_mod_10 <= 4 && (n_mod_100 < 10 || n_mod_100 >= 20)
+ return 1_u8
+ else
+ return 2_u8
+ end
+ end
+
+ # Plural form for Arabic language
+ #
+ # Corresponds to i18next rule #5
+ # Rule: (n==0 ? 0 : n==1 ? 1 : n==2 ? 2 : n%100>=3 && n%100<=10 ? 3 : n%100>=11 ? 4 : 5)
+ #
+ def self.special_arabic(count : Int) : UInt8
+ return count.to_u8 if (count == 0 || count == 1 || count == 2)
+
+ n_mod_100 = count % 100
+
+ return 3_u8 if (n_mod_100 >= 3 && n_mod_100 <= 10)
+ return 4_u8 if (n_mod_100 >= 11)
+ return 5_u8
+ end
+
+ # Plural form for Czech and Slovak languages
+ #
+ # Corresponds to i18next rule #6
+ # Rule: ((n==1) ? 0 : (n>=2 && n<=4) ? 1 : 2)
+ #
+ def self.special_czech_slovak(count : Int) : UInt8
+ return 0_u8 if (count == 1)
+ return 1_u8 if (count >= 2 && count <= 4)
+ return 2_u8
+ end
+
+ # Plural form for Polish and Kashubian languages
+ #
+ # Corresponds to i18next rule #7
+ # Rule: (n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2)
+ #
+ def self.special_polish_kashubian(count : Int) : UInt8
+ return 0_u8 if (count == 1)
+
+ n_mod_10 = count % 10
+ n_mod_100 = count % 100
+
+ if n_mod_10 >= 2 && n_mod_10 <= 4 && (n_mod_100 < 10 || n_mod_100 >= 20)
+ return 1_u8
+ else
+ return 2_u8
+ end
+ end
+
+ # Plural form for Welsh language
+ #
+ # Corresponds to i18next rule #8
+ # Rule: ((n==1) ? 0 : (n==2) ? 1 : (n != 8 && n != 11) ? 2 : 3)
+ #
+ def self.special_welsh(count : Int) : UInt8
+ return 0_u8 if (count == 1)
+ return 1_u8 if (count == 2)
+ return 2_u8 if (count != 8 && count != 11)
+ return 3_u8
+ end
+
+ # Plural form for Irish language
+ #
+ # Corresponds to i18next rule #10
+ # Rule: (n==1 ? 0 : n==2 ? 1 : n<7 ? 2 : n<11 ? 3 : 4)
+ #
+ def self.special_irish(count : Int) : UInt8
+ return 0_u8 if (count == 1)
+ return 1_u8 if (count == 2)
+ return 2_u8 if (count < 7)
+ return 3_u8 if (count < 11)
+ return 4_u8
+ end
+
+ # Plural form for Gaelic language
+ #
+ # Corresponds to i18next rule #11
+ # Rule: ((n==1 || n==11) ? 0 : (n==2 || n==12) ? 1 : (n > 2 && n < 20) ? 2 : 3)
+ #
+ def self.special_scottish_gaelic(count : Int) : UInt8
+ return 0_u8 if (count == 1 || count == 11)
+ return 1_u8 if (count == 2 || count == 12)
+ return 2_u8 if (count > 2 && count < 20)
+ return 3_u8
+ end
+
+ # Plural form for Icelandic language
+ #
+ # Corresponds to i18next rule #12
+ # Rule: (n%10!=1 || n%100==11)
+ #
+ def self.special_icelandic(count : Int) : UInt8
+ if (count % 10) != 1 || (count % 100) == 11
+ return 1_u8
+ else
+ return 0_u8
+ end
+ end
+
+ # Plural form for Javanese language
+ #
+ # Corresponds to i18next rule #13
+ # Rule: (n !== 0)
+ #
+ def self.special_javanese(count : Int) : UInt8
+ return (count != 0) ? 1_u8 : 0_u8
+ end
+
+ # Plural form for Cornish language
+ #
+ # Corresponds to i18next rule #14
+ # Rule: ((n==1) ? 0 : (n==2) ? 1 : (n == 3) ? 2 : 3)
+ #
+ def self.special_cornish(count : Int) : UInt8
+ return 0_u8 if count == 1
+ return 1_u8 if count == 2
+ return 2_u8 if count == 3
+ return 3_u8
+ end
+
+ # Plural form for Lithuanian language
+ #
+ # Corresponds to i18next rule #15
+ # Rule: (n%10==1 && n%100!=11 ? 0 : n%10>=2 && (n%100<10 || n%100>=20) ? 1 : 2)
+ #
+ def self.special_lithuanian(count : Int) : UInt8
+ n_mod_10 = count % 10
+ n_mod_100 = count % 100
+
+ if n_mod_10 == 1 && n_mod_100 != 11
+ return 0_u8
+ elsif n_mod_10 >= 2 && (n_mod_100 < 10 || n_mod_100 >= 20)
+ return 1_u8
+ else
+ return 2_u8
+ end
+ end
+
+ # Plural form for Latvian language
+ #
+ # Corresponds to i18next rule #16
+ # Rule: (n%10==1 && n%100!=11 ? 0 : n !== 0 ? 1 : 2)
+ #
+ def self.special_latvian(count : Int) : UInt8
+ if (count % 10) == 1 && (count % 100) != 11
+ return 0_u8
+ elsif count != 0
+ return 1_u8
+ else
+ return 2_u8
+ end
+ end
+
+ # Plural form for Macedonian language
+ #
+ # Corresponds to i18next rule #17
+ # Rule: (n==1 || n%10==1 && n%100!=11 ? 0 : 1)
+ #
+ def self.special_macedonian(count : Int) : UInt8
+ if count == 1 || ((count % 10) == 1 && (count % 100) != 11)
+ return 0_u8
+ else
+ return 1_u8
+ end
+ end
+
+ # Plural form for Mandinka language
+ #
+ # Corresponds to i18next rule #18
+ # Rule: (n==0 ? 0 : n==1 ? 1 : 2)
+ #
+ def self.special_mandinka(count : Int) : UInt8
+ return (count == 0 || count == 1) ? count.to_u8 : 2_u8
+ end
+
+ # Plural form for Maltese language
+ #
+ # Corresponds to i18next rule #19
+ # Rule: (n==1 ? 0 : n==0 || ( n%100>1 && n%100<11) ? 1 : (n%100>10 && n%100<20 ) ? 2 : 3)
+ #
+ def self.special_maltese(count : Int) : UInt8
+ return 0_u8 if count == 1
+ return 1_u8 if count == 0
+
+ n_mod_100 = count % 100
+ return 1_u8 if (n_mod_100 > 1 && n_mod_100 < 11)
+ return 2_u8 if (n_mod_100 > 10 && n_mod_100 < 20)
+ return 3_u8
+ end
+
+ # Plural form for Romanian language
+ #
+ # Corresponds to i18next rule #20
+ # Rule: (n==1 ? 0 : (n==0 || (n%100 > 0 && n%100 < 20)) ? 1 : 2)
+ #
+ def self.special_romanian(count : Int) : UInt8
+ return 0_u8 if count == 1
+ return 1_u8 if count == 0
+
+ n_mod_100 = count % 100
+ return 1_u8 if (n_mod_100 > 0 && n_mod_100 < 20)
+ return 2_u8
+ end
+
+ # Plural form for Slovenian language
+ #
+ # Corresponds to i18next rule #21
+ # Rule: (n%100==1 ? 1 : n%100==2 ? 2 : n%100==3 || n%100==4 ? 3 : 0)
+ #
+ def self.special_slovenian(count : Int) : UInt8
+ n_mod_100 = count % 100
+ return 1_u8 if (n_mod_100 == 1)
+ return 2_u8 if (n_mod_100 == 2)
+ return 3_u8 if (n_mod_100 == 3 || n_mod_100 == 4)
+ return 0_u8
+ end
+
+ # Plural form for Hebrew language
+ #
+ # Corresponds to i18next rule #22
+ # Rule: (n==1 ? 0 : n==2 ? 1 : (n<0 || n>10) && n%10==0 ? 2 : 3)
+ #
+ def self.special_hebrew(count : Int) : UInt8
+ return 0_u8 if (count == 1)
+ return 1_u8 if (count == 2)
+
+ if (count < 0 || count > 10) && (count % 10) == 0
+ return 2_u8
+ else
+ return 3_u8
+ end
+ end
+
+ # Plural form for Odia ("or") language
+ #
+ # This one is a bit special. It should use rule #2 (like English)
+ # but the "numbers" (suffixes?) it has are inverted, so we'll make a
+ # special rule for it.
+ #
+ def self.special_odia(count : Int) : UInt8
+ return (count == 1) ? 0_u8 : 1_u8
+ end
+
+ # -------------------
+ # "v3.5" rules
+ # -------------------
+
+ # Plural form for Spanish & Italian languages
+ #
+ # This rule is mostly compliant to CLDR v42
+ #
+ def self.special_cldr_spanish_italian(count : Int) : UInt8
+ return 0_u8 if (count == 1) # one
+ return 1_u8 if (count != 0 && count % 1_000_000 == 0) # many
+ return 2_u8 # other
+ end
+
+ # Plural form for French and Portuguese
+ #
+ # This rule is mostly compliant to CLDR v42
+ #
+ def self.special_cldr_french_portuguese(count : Int) : UInt8
+ return 0_u8 if (count == 0 || count == 1) # one
+ return 1_u8 if (count % 1_000_000 == 0) # many
+ return 2_u8 # other
+ end
+
+ # Plural form for Hungarian and Serbian
+ #
+ # This rule is mostly compliant to CLDR v42
+ #
+ def self.special_cldr_hungarian_serbian(count : Int) : UInt8
+ n_mod_10 = count % 10
+ n_mod_100 = count % 100
+
+ return 0_u8 if (n_mod_10 == 1 && n_mod_100 != 11) # one
+ return 1_u8 if (2 <= n_mod_10 <= 4 && (n_mod_100 < 12 || 14 < n_mod_100)) # few
+ return 2_u8 # other
+ end
+ end
+end
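A sketch of driving the resolver directly, as translate_count does; the suffixes shown follow from the rules defined above:

    I18next::Plurals::RESOLVER.get_suffix("en", 1)  # => ""        (singular)
    I18next::Plurals::RESOLVER.get_suffix("en", 2)  # => "_plural" (simplified simple plural)
    I18next::Plurals::RESOLVER.get_suffix("ru", 3)  # => "_1"      (few, Dual_Slavic rule)
    I18next::Plurals::RESOLVER.get_suffix("ar", 11) # => "_4"      (Arabic rule, n % 100 >= 11)

    # The suffix is then appended to the base locale key,
    # e.g. "generic_count" + "_plural" (key name is illustrative).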
diff --git a/src/invidious/helpers/json_filter.cr b/src/invidious/helpers/json_filter.cr
deleted file mode 100644
index e4b57cea..00000000
--- a/src/invidious/helpers/json_filter.cr
+++ /dev/null
@@ -1,248 +0,0 @@
-module JSONFilter
- alias BracketIndex = Hash(Int64, Int64)
-
- alias GroupedFieldsValue = String | Array(GroupedFieldsValue)
- alias GroupedFieldsList = Array(GroupedFieldsValue)
-
- class FieldsParser
- class ParseError < Exception
- end
-
- # Returns the `Regex` pattern used to match nest groups
- def self.nest_group_pattern : Regex
- # uses a '.' character to match json keys as they are allowed
- # to contain any unicode codepoint
- /(?:|,)(?<groupname>[^,\n]*?)\(/
- end
-
- # Returns the `Regex` pattern used to check if there are any empty nest groups
- def self.unnamed_nest_group_pattern : Regex
- /^\(|\(\(|\/\(/
- end
-
- def self.parse_fields(fields_text : String) : Nil
- if fields_text.empty?
- raise FieldsParser::ParseError.new "Fields is empty"
- end
-
- opening_bracket_count = fields_text.count('(')
- closing_bracket_count = fields_text.count(')')
-
- if opening_bracket_count != closing_bracket_count
- bracket_type = opening_bracket_count > closing_bracket_count ? "opening" : "closing"
- raise FieldsParser::ParseError.new "There are too many #{bracket_type} brackets (#{opening_bracket_count}:#{closing_bracket_count})"
- elsif match_result = unnamed_nest_group_pattern.match(fields_text)
- raise FieldsParser::ParseError.new "Unnamed nest group at position #{match_result.begin}"
- end
-
- # first, handle top-level single nested properties: items/id, playlistItems/snippet, etc
- parse_single_nests(fields_text) { |nest_list| yield nest_list }
-
- # next, handle nest groups: items(id, etag, etc)
- parse_nest_groups(fields_text) { |nest_list| yield nest_list }
- end
-
- def self.parse_single_nests(fields_text : String) : Nil
- single_nests = remove_nest_groups(fields_text)
-
- if !single_nests.empty?
- property_nests = single_nests.split(',')
-
- property_nests.each do |nest|
- nest_list = nest.split('/')
- if nest_list.includes? ""
- raise FieldsParser::ParseError.new "Empty key in nest list: #{nest_list}"
- end
- yield nest_list
- end
- # else
- # raise FieldsParser::ParseError.new "Empty key in nest list 22: #{fields_text} | #{single_nests}"
- end
- end
-
- def self.parse_nest_groups(fields_text : String) : Nil
- nest_stack = [] of NamedTuple(group_name: String, closing_bracket_index: Int64)
- bracket_pairs = get_bracket_pairs(fields_text, true)
-
- text_index = 0
- regex_index = 0
-
- while regex_result = self.nest_group_pattern.match(fields_text, regex_index)
- raw_match = regex_result[0]
- group_name = regex_result["groupname"]
-
- text_index = regex_result.begin
- regex_index = regex_result.end
-
- if text_index.nil? || regex_index.nil?
- raise FieldsParser::ParseError.new "Received invalid index while parsing nest groups: text_index: #{text_index} | regex_index: #{regex_index}"
- end
-
- offset = raw_match.starts_with?(',') ? 1 : 0
-
- opening_bracket_index = (text_index + group_name.size) + offset
- closing_bracket_index = bracket_pairs[opening_bracket_index]
- content_start = opening_bracket_index + 1
-
- content = fields_text[content_start...closing_bracket_index]
-
- if content.empty?
- raise FieldsParser::ParseError.new "Empty nest group at position #{content_start}"
- else
- content = remove_nest_groups(content)
- end
-
- while nest_stack.size > 0 && closing_bracket_index > nest_stack[nest_stack.size - 1][:closing_bracket_index]
- if nest_stack.size
- nest_stack.pop
- end
- end
-
- group_name.split('/').each do |group_name|
- nest_stack.push({
- group_name: group_name,
- closing_bracket_index: closing_bracket_index,
- })
- end
-
- if !content.empty?
- properties = content.split(',')
-
- properties.each do |prop|
- nest_list = nest_stack.map { |nest_prop| nest_prop[:group_name] }
-
- if !prop.empty?
- if prop.includes?('/')
- parse_single_nests(prop) { |list| nest_list += list }
- else
- nest_list.push prop
- end
- else
- raise FieldsParser::ParseError.new "Empty key in nest list: #{nest_list << prop}"
- end
-
- yield nest_list
- end
- end
- end
- end
-
- def self.remove_nest_groups(text : String) : String
- content_bracket_pairs = get_bracket_pairs(text, false)
-
- content_bracket_pairs.each_key.to_a.reverse.each do |opening_bracket|
- closing_bracket = content_bracket_pairs[opening_bracket]
- last_comma = text.rindex(',', opening_bracket) || 0
-
- text = text[0...last_comma] + text[closing_bracket + 1...text.size]
- end
-
- return text.starts_with?(',') ? text[1...text.size] : text
- end
-
- def self.get_bracket_pairs(text : String, recursive = true) : BracketIndex
- istart = [] of Int64
- bracket_index = BracketIndex.new
-
- text.each_char_with_index do |char, index|
- if char == '('
- istart.push(index.to_i64)
- end
-
- if char == ')'
- begin
- opening = istart.pop
- if recursive || (!recursive && istart.size == 0)
- bracket_index[opening] = index.to_i64
- end
- rescue
- raise FieldsParser::ParseError.new "No matching opening parenthesis at: #{index}"
- end
- end
- end
-
- if istart.size != 0
- idx = istart.pop
- raise FieldsParser::ParseError.new "No matching closing parenthesis at: #{idx}"
- end
-
- return bracket_index
- end
- end
-
- class FieldsGrouper
- alias SkeletonValue = Hash(String, SkeletonValue)
-
- def self.create_json_skeleton(fields_text : String) : SkeletonValue
- root_hash = {} of String => SkeletonValue
-
- FieldsParser.parse_fields(fields_text) do |nest_list|
- current_item = root_hash
- nest_list.each do |key|
- if current_item[key]?
- current_item = current_item[key]
- else
- current_item[key] = {} of String => SkeletonValue
- current_item = current_item[key]
- end
- end
- end
- root_hash
- end
-
- def self.create_grouped_fields_list(json_skeleton : SkeletonValue) : GroupedFieldsList
- grouped_fields_list = GroupedFieldsList.new
- json_skeleton.each do |key, value|
- grouped_fields_list.push key
-
- nested_keys = create_grouped_fields_list(value)
- grouped_fields_list.push nested_keys unless nested_keys.empty?
- end
- return grouped_fields_list
- end
- end
-
- class FilterError < Exception
- end
-
- def self.filter(item : JSON::Any, fields_text : String, in_place : Bool = true)
- skeleton = FieldsGrouper.create_json_skeleton(fields_text)
- grouped_fields_list = FieldsGrouper.create_grouped_fields_list(skeleton)
- filter(item, grouped_fields_list, in_place)
- end
-
- def self.filter(item : JSON::Any, grouped_fields_list : GroupedFieldsList, in_place : Bool = true) : JSON::Any
- item = item.clone unless in_place
-
- if !item.as_h? && !item.as_a?
- raise FilterError.new "Can't filter '#{item}' by #{grouped_fields_list}"
- end
-
- top_level_keys = Array(String).new
- grouped_fields_list.each do |value|
- if value.is_a? String
- top_level_keys.push value
- elsif value.is_a? Array
- if !top_level_keys.empty?
- key_to_filter = top_level_keys.last
-
- if item.as_h?
- filter(item[key_to_filter], value, in_place: true)
- elsif item.as_a?
- item.as_a.each { |arr_item| filter(arr_item[key_to_filter], value, in_place: true) }
- end
- else
- raise FilterError.new "Tried to filter while top level keys list is empty"
- end
- end
- end
-
- if item.as_h?
- item.as_h.select! top_level_keys
- elsif item.as_a?
- item.as_a.map { |value| filter(value, top_level_keys, in_place: true) }
- end
-
- item
- end
-end
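For context on the removal: JSONFilter implemented the `fields` query parameter used to prune JSON responses (similar in spirit to the Google APIs' partial-response syntax), as seen in the removed call sites earlier in this diff. An illustration of the syntax it accepted, with field names chosen only for the example; this API no longer exists after this change:

    require "json"

    response = JSON.parse(%({"videoId": "abc", "title": "t", "author": "a", "formats": [{"url": "u", "itag": 22}]}))

    # Keep only videoId, title and the url of each format:
    JSONFilter.filter(response, "videoId,title,formats(url)")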
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index 5d91a258..03349595 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,3 +1,5 @@
+require "colorize"
+
enum LogLevel
All = 0
Trace = 1
@@ -10,21 +12,30 @@ enum LogLevel
end
class Invidious::LogHandler < Kemal::BaseLogHandler
- def initialize(@io : IO = STDOUT, @level = LogLevel::Debug)
+ def initialize(@io : IO = STDOUT, @level = LogLevel::Debug, use_color : Bool = true)
+ Colorize.enabled = use_color
+ Colorize.on_tty_only!
end
def call(context : HTTP::Server::Context)
elapsed_time = Time.measure { call_next(context) }
elapsed_text = elapsed_text(elapsed_time)
- info("#{context.response.status_code} #{context.request.method} #{context.request.resource} #{elapsed_text}")
+ # Default: full path with parameters
+ requested_url = context.request.resource
- context
- end
+ # Try not to log search queries passed as GET parameters during normal use
+ # (They will still be logged if log level is 'Debug' or 'Trace')
+ if @level > LogLevel::Debug && (
+ requested_url.downcase.includes?("search") || requested_url.downcase.includes?("q=")
+ )
+ # Log only the path
+ requested_url = context.request.path
+ end
- def puts(message : String)
- @io << message << '\n'
- @io.flush
+ info("#{context.response.status_code} #{context.request.method} #{requested_url} #{elapsed_text}")
+
+ context
end
def write(message : String)
@@ -32,18 +43,22 @@ class Invidious::LogHandler < Kemal::BaseLogHandler
@io.flush
end
- def set_log_level(level : String)
- @level = LogLevel.parse(level)
- end
-
- def set_log_level(level : LogLevel)
- @level = level
+ def color(level)
+ case level
+ when LogLevel::Trace then :cyan
+ when LogLevel::Debug then :green
+ when LogLevel::Info then :white
+ when LogLevel::Warn then :yellow
+ when LogLevel::Error then :red
+ when LogLevel::Fatal then :magenta
+ else :default
+ end
end
{% for level in %w(trace debug info warn error fatal) %}
def {{level.id}}(message : String)
if LogLevel::{{level.id.capitalize}} >= @level
- puts("#{Time.utc} [{{level.id}}] #{message}")
+ puts("#{Time.utc} [{{level.id}}] #{message}".colorize(color(LogLevel::{{level.id.capitalize}})))
end
end
{% end %}
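A sketch of constructing the reworked handler with the new `use_color` flag; the output path and log level are illustrative:

    # Sketch: log to a file at Info level, with colour codes disabled.
    log_file = File.open("invidious.log", "a")
    logger = Invidious::LogHandler.new(log_file, LogLevel::Info, use_color: false)
    logger.info("Server started")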
diff --git a/src/invidious/helpers/macros.cr b/src/invidious/helpers/macros.cr
index 5d426a8b..43e7171b 100644
--- a/src/invidious/helpers/macros.cr
+++ b/src/invidious/helpers/macros.cr
@@ -48,11 +48,26 @@ module JSON::Serializable
end
end
-macro templated(filename, template = "template", navbar_search = true)
+macro templated(_filename, template = "template", navbar_search = true)
navbar_search = {{navbar_search}}
- render "src/invidious/views/#{{{filename}}}.ecr", "src/invidious/views/#{{{template}}}.ecr"
+
+ {{ filename = "src/invidious/views/" + _filename + ".ecr" }}
+ {{ layout = "src/invidious/views/" + template + ".ecr" }}
+
+ __content_filename__ = {{filename}}
+ content = Kilt.render({{filename}})
+ Kilt.render({{layout}})
end
macro rendered(filename)
- render "src/invidious/views/#{{{filename}}}.ecr"
+ Kilt.render("src/invidious/views/#{{{filename}}}.ecr")
+end
+
+# Similar to Kemal's halt method, but works inside a
+# regular method.
+macro haltf(env, status_code = 200, response = "")
+ {{env}}.response.status_code = {{status_code}}
+ {{env}}.response.print {{response}}
+ {{env}}.response.close
+ return
end
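A sketch of calling the new haltf macro from inside a helper method, the scenario the comment above describes; the check and message are illustrative:

    # Sketch: bail out of the request early from a plain method.
    def reject_unauthenticated(env, token : String?)
      return if token # illustrative validity check
      haltf env, status_code: 403, response: "Forbidden"
    end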
diff --git a/src/invidious/helpers/proxy.cr b/src/invidious/helpers/proxy.cr
deleted file mode 100644
index 3418d887..00000000
--- a/src/invidious/helpers/proxy.cr
+++ /dev/null
@@ -1,316 +0,0 @@
-# See https://github.com/crystal-lang/crystal/issues/2963
-class HTTPProxy
- getter proxy_host : String
- getter proxy_port : Int32
- getter options : Hash(Symbol, String)
- getter tls : OpenSSL::SSL::Context::Client?
-
- def initialize(@proxy_host, @proxy_port = 80, @options = {} of Symbol => String)
- end
-
- def open(host, port, tls = nil, connection_options = {} of Symbol => Float64 | Nil)
- dns_timeout = connection_options.fetch(:dns_timeout, nil)
- connect_timeout = connection_options.fetch(:connect_timeout, nil)
- read_timeout = connection_options.fetch(:read_timeout, nil)
-
- socket = TCPSocket.new @proxy_host, @proxy_port, dns_timeout, connect_timeout
- socket.read_timeout = read_timeout if read_timeout
- socket.sync = true
-
- socket << "CONNECT #{host}:#{port} HTTP/1.1\r\n"
-
- if options[:user]?
- credentials = Base64.strict_encode("#{options[:user]}:#{options[:password]}")
- credentials = "#{credentials}\n".gsub(/\s/, "")
- socket << "Proxy-Authorization: Basic #{credentials}\r\n"
- end
-
- socket << "\r\n"
-
- resp = parse_response(socket)
-
- if resp[:code]? == 200
- {% if !flag?(:without_openssl) %}
- if tls
- tls_socket = OpenSSL::SSL::Socket::Client.new(socket, context: tls, sync_close: true, hostname: host)
- socket = tls_socket
- end
- {% end %}
-
- return socket
- else
- socket.close
- raise IO::Error.new(resp.inspect)
- end
- end
-
- private def parse_response(socket)
- resp = {} of Symbol => Int32 | String | Hash(String, String)
-
- begin
- version, code, reason = socket.gets.as(String).chomp.split(/ /, 3)
-
- headers = {} of String => String
-
- while (line = socket.gets.as(String)) && (line.chomp != "")
- name, value = line.split(/:/, 2)
- headers[name.strip] = value.strip
- end
-
- resp[:version] = version
- resp[:code] = code.to_i
- resp[:reason] = reason
- resp[:headers] = headers
- rescue
- end
-
- return resp
- end
-end
-
-class HTTPClient < HTTP::Client
- def set_proxy(proxy : HTTPProxy)
- begin
- @io = proxy.open(host: @host, port: @port, tls: @tls, connection_options: proxy_connection_options)
- rescue IO::Error
- @io = nil
- end
- end
-
- def unset_proxy
- @io = nil
- end
-
- def proxy_connection_options
- opts = {} of Symbol => Float64 | Nil
-
- opts[:dns_timeout] = @dns_timeout
- opts[:connect_timeout] = @connect_timeout
- opts[:read_timeout] = @read_timeout
-
- return opts
- end
-end
-
-def get_proxies(country_code = "US")
- # return get_spys_proxies(country_code)
- return get_nova_proxies(country_code)
-end
-
-def filter_proxies(proxies)
- proxies.select! do |proxy|
- begin
- client = HTTPClient.new(YT_URL)
- client.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
- client.read_timeout = 10.seconds
- client.connect_timeout = 10.seconds
-
- proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
- client.set_proxy(proxy)
-
- status_ok = client.head("/").status_code == 200
- client.close
- status_ok
- rescue ex
- false
- end
- end
-
- return proxies
-end
-
-def get_nova_proxies(country_code = "US")
- country_code = country_code.downcase
- client = HTTP::Client.new(URI.parse("https://www.proxynova.com"))
- client.read_timeout = 10.seconds
- client.connect_timeout = 10.seconds
-
- headers = HTTP::Headers.new
- headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
- headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
- headers["Accept-Language"] = "Accept-Language: en-US,en;q=0.9"
- headers["Host"] = "www.proxynova.com"
- headers["Origin"] = "https://www.proxynova.com"
- headers["Referer"] = "https://www.proxynova.com/proxy-server-list/country-#{country_code}/"
-
- response = client.get("/proxy-server-list/country-#{country_code}/", headers)
- client.close
- document = XML.parse_html(response.body)
-
- proxies = [] of {ip: String, port: Int32, score: Float64}
- document.xpath_nodes(%q(//tr[@data-proxy-id])).each do |node|
- ip = node.xpath_node(%q(.//td/abbr/script)).not_nil!.content
- ip = ip.match(/document\.write\('(?<sub1>[^']+)'.substr\(8\) \+ '(?<sub2>[^']+)'/).not_nil!
- ip = "#{ip["sub1"][8..-1]}#{ip["sub2"]}"
- port = node.xpath_node(%q(.//td[2])).not_nil!.content.strip.to_i
-
- anchor = node.xpath_node(%q(.//td[4]/div)).not_nil!
- speed = anchor["data-value"].to_f
- latency = anchor["title"].to_f
- uptime = node.xpath_node(%q(.//td[5]/span)).not_nil!.content.rchop("%").to_f
-
- # TODO: Tweak me
- score = (uptime*4 + speed*2 + latency)/7
- proxies << {ip: ip, port: port, score: score}
- end
-
- # proxies = proxies.sort_by { |proxy| proxy[:score] }.reverse
- return proxies
-end
-
-def get_spys_proxies(country_code = "US")
- client = HTTP::Client.new(URI.parse("http://spys.one"))
- client.read_timeout = 10.seconds
- client.connect_timeout = 10.seconds
-
- headers = HTTP::Headers.new
- headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
- headers["Accept"] = "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8"
- headers["Accept-Language"] = "Accept-Language: en-US,en;q=0.9"
- headers["Host"] = "spys.one"
- headers["Origin"] = "http://spys.one"
- headers["Referer"] = "http://spys.one/free-proxy-list/#{country_code}/"
- headers["Content-Type"] = "application/x-www-form-urlencoded"
- body = {
- "xpp" => "5",
- "xf1" => "0",
- "xf2" => "0",
- "xf4" => "0",
- "xf5" => "1",
- }
-
- response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
- client.close
- 20.times do
- if response.status_code == 200
- break
- end
- response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
- end
-
- response = XML.parse_html(response.body)
-
- mapping = response.xpath_node(%q(.//body/script)).not_nil!.content
- mapping = mapping.match(/\}\('(?<p>[^']+)',\d+,\d+,'(?<x>[^']+)'/).not_nil!
- p = mapping["p"].not_nil!
- x = mapping["x"].not_nil!
- mapping = decrypt_port(p, x)
-
- proxies = [] of {ip: String, port: Int32, score: Float64}
- response = response.xpath_node(%q(//tr/td/table)).not_nil!
- response.xpath_nodes(%q(.//tr)).each do |node|
- if !node["onmouseover"]?
- next
- end
-
- ip = node.xpath_node(%q(.//td[1]/font[2])).to_s.match(/<font class="spy14">(?<address>[^<]+)</).not_nil!["address"]
- encrypted_port = node.xpath_node(%q(.//td[1]/font[2]/script)).not_nil!.content
- encrypted_port = encrypted_port.match(/<\\\/font>"\+(?<encrypted_port>[\d\D]+)\)$/).not_nil!["encrypted_port"]
-
- port = ""
- encrypted_port.split("+").each do |number|
- number = number.delete("()")
- left_side, right_side = number.split("^")
- result = mapping[left_side] ^ mapping[right_side]
- port = "#{port}#{result}"
- end
- port = port.to_i
-
- latency = node.xpath_node(%q(.//td[6])).not_nil!.content.to_f
- speed = node.xpath_node(%q(.//td[7]/font/table)).not_nil!["width"].to_f
- uptime = node.xpath_node(%q(.//td[8]/font/acronym)).not_nil!
-
- # Skip proxies that are down
- if uptime["title"].ends_with? "?"
- next
- end
-
- if md = uptime.content.match(/^\d+/)
- uptime = md[0].to_f
- else
- next
- end
-
- score = (uptime*4 + speed*2 + latency)/7
-
- proxies << {ip: ip, port: port, score: score}
- end
-
- proxies = proxies.sort_by { |proxy| proxy[:score] }.reverse
- return proxies
-end
-
-def decrypt_port(p, x)
- x = x.split("^")
- s = {} of String => String
-
- 60.times do |i|
- if x[i]?.try &.empty?
- s[y_func(i)] = y_func(i)
- else
- s[y_func(i)] = x[i]
- end
- end
-
- x = s
- p = p.gsub(/\b\w+\b/, x)
-
- p = p.split(";")
- p = p.map { |item| item.split("=") }
-
- mapping = {} of String => Int32
- p.each do |item|
- if item == [""]
- next
- end
-
- key = item[0]
- value = item[1]
- value = value.split("^")
-
- if value.size == 1
- value = value[0].to_i
- else
- left_side = value[0].to_i?
- left_side ||= mapping[value[0]]
- right_side = value[1].to_i?
- right_side ||= mapping[value[1]]
-
- value = left_side ^ right_side
- end
-
- mapping[key] = value
- end
-
- return mapping
-end
-
-def y_func(c)
- return (c < 60 ? "" : y_func((c/60).to_i)) + ((c = c % 60) > 35 ? ((c.to_u8 + 29).unsafe_chr) : c.to_s(36))
-end
-
-PROXY_LIST = {
- "GB" => [{ip: "147.135.206.233", port: 3128}, {ip: "167.114.180.102", port: 8080}, {ip: "176.35.250.108", port: 8080}, {ip: "5.148.128.44", port: 80}, {ip: "62.7.85.234", port: 8080}, {ip: "88.150.135.10", port: 36624}],
- "DE" => [{ip: "138.201.223.250", port: 31288}, {ip: "138.68.73.59", port: 32574}, {ip: "159.69.211.173", port: 3128}, {ip: "173.249.43.105", port: 3128}, {ip: "212.202.244.90", port: 8080}, {ip: "5.56.18.35", port: 38827}],
- "FR" => [{ip: "137.74.254.242", port: 3128}, {ip: "151.80.143.155", port: 53281}, {ip: "178.33.150.97", port: 3128}, {ip: "37.187.2.31", port: 3128}, {ip: "5.135.164.72", port: 3128}, {ip: "5.39.91.73", port: 3128}, {ip: "51.38.162.2", port: 32231}, {ip: "51.38.217.121", port: 808}, {ip: "51.75.109.81", port: 3128}, {ip: "51.75.109.82", port: 3128}, {ip: "51.75.109.83", port: 3128}, {ip: "51.75.109.84", port: 3128}, {ip: "51.75.109.86", port: 3128}, {ip: "51.75.109.88", port: 3128}, {ip: "51.75.109.90", port: 3128}, {ip: "62.210.167.3", port: 3128}, {ip: "90.63.218.232", port: 8080}, {ip: "91.134.165.198", port: 9999}],
- "IN" => [{ip: "1.186.151.206", port: 36253}, {ip: "1.186.63.130", port: 39142}, {ip: "103.105.40.1", port: 16538}, {ip: "103.105.40.153", port: 16538}, {ip: "103.106.148.203", port: 60227}, {ip: "103.106.148.207", port: 51451}, {ip: "103.12.246.12", port: 8080}, {ip: "103.14.235.109", port: 8080}, {ip: "103.14.235.26", port: 8080}, {ip: "103.198.172.4", port: 50820}, {ip: "103.205.112.1", port: 23500}, {ip: "103.209.64.19", port: 6666}, {ip: "103.211.76.5", port: 8080}, {ip: "103.216.82.19", port: 6666}, {ip: "103.216.82.190", port: 6666}, {ip: "103.216.82.209", port: 54806}, {ip: "103.216.82.214", port: 6666}, {ip: "103.216.82.37", port: 6666}, {ip: "103.216.82.44", port: 8080}, {ip: "103.216.82.50", port: 53281}, {ip: "103.22.173.230", port: 8080}, {ip: "103.224.38.2", port: 83}, {ip: "103.226.142.90", port: 41386}, {ip: "103.236.114.38", port: 49638}, {ip: "103.240.161.107", port: 6666}, {ip: "103.240.161.108", port: 6666}, {ip: "103.240.161.109", port: 6666}, {ip: "103.240.161.59", port: 48809}, {ip: "103.245.198.101", port: 8080}, {ip: "103.250.148.82", port: 6666}, {ip: "103.251.58.51", port: 61489}, {ip: "103.253.169.115", port: 32731}, {ip: "103.253.211.182", port: 8080}, {ip: "103.253.211.182", port: 80}, {ip: "103.255.234.169", port: 39847}, {ip: "103.42.161.118", port: 8080}, {ip: "103.42.162.30", port: 8080}, {ip: "103.42.162.50", port: 8080}, {ip: "103.42.162.58", port: 8080}, {ip: "103.46.233.12", port: 83}, {ip: "103.46.233.13", port: 83}, {ip: "103.46.233.16", port: 83}, {ip: "103.46.233.17", port: 83}, {ip: "103.46.233.21", port: 83}, {ip: "103.46.233.23", port: 83}, {ip: "103.46.233.29", port: 81}, {ip: "103.46.233.29", port: 83}, {ip: "103.46.233.50", port: 83}, {ip: "103.47.153.87", port: 8080}, {ip: "103.47.66.2", port: 39804}, {ip: "103.49.53.1", port: 81}, {ip: "103.52.220.1", port: 49068}, {ip: "103.56.228.166", port: 53281}, {ip: "103.56.30.128", port: 8080}, {ip: "103.65.193.17", port: 50862}, {ip: "103.65.195.1", port: 33960}, {ip: "103.69.220.14", port: 3128}, {ip: "103.70.128.84", port: 8080}, {ip: "103.70.128.86", port: 8080}, {ip: "103.70.131.74", port: 8080}, {ip: "103.70.146.250", port: 59563}, {ip: "103.72.216.194", port: 38345}, {ip: "103.75.161.38", port: 21776}, {ip: "103.76.253.155", port: 3128}, {ip: "103.87.104.137", port: 8080}, {ip: "110.235.198.3", port: 57660}, {ip: "114.69.229.161", port: 8080}, {ip: "117.196.231.201", port: 37769}, {ip: "117.211.166.214", port: 3128}, {ip: "117.240.175.51", port: 3128}, {ip: "117.240.210.155", port: 53281}, {ip: "117.240.59.115", port: 36127}, {ip: "117.242.154.73", port: 33889}, {ip: "117.244.15.243", port: 3128}, {ip: "119.235.54.3", port: 8080}, {ip: "120.138.117.102", port: 59308}, {ip: "123.108.200.185", port: 83}, {ip: "123.108.200.217", port: 82}, {ip: "123.176.43.218", port: 40524}, {ip: "125.21.43.82", port: 8080}, {ip: "125.62.192.225", port: 82}, {ip: "125.62.192.33", port: 84}, {ip: "125.62.194.1", port: 83}, {ip: "125.62.213.134", port: 82}, {ip: "125.62.213.18", port: 83}, {ip: "125.62.213.201", port: 84}, {ip: "125.62.213.242", port: 83}, {ip: "125.62.214.185", port: 84}, {ip: "139.5.26.27", port: 53281}, {ip: "14.102.67.101", port: 30337}, {ip: "14.142.122.134", port: 8080}, {ip: "150.129.114.194", port: 6666}, {ip: "150.129.151.62", port: 6666}, {ip: "150.129.171.115", port: 6666}, {ip: "150.129.201.30", port: 6666}, {ip: "157.119.207.38", port: 53281}, {ip: "175.100.185.151", port: 53281}, {ip: "182.18.177.114", port: 56173}, {ip: "182.73.194.170", port: 8080}, {ip: "182.74.85.230", port: 
51214}, {ip: "183.82.116.56", port: 8080}, {ip: "183.82.32.56", port: 49551}, {ip: "183.87.14.229", port: 53281}, {ip: "183.87.14.250", port: 44915}, {ip: "202.134.160.168", port: 8080}, {ip: "202.134.166.1", port: 8080}, {ip: "202.134.180.50", port: 8080}, {ip: "202.62.84.210", port: 53281}, {ip: "203.192.193.225", port: 8080}, {ip: "203.192.195.14", port: 31062}, {ip: "203.192.217.11", port: 8080}, {ip: "223.196.83.182", port: 53281}, {ip: "27.116.20.169", port: 36630}, {ip: "27.116.20.209", port: 36630}, {ip: "27.116.51.21", port: 36033}, {ip: "43.224.8.114", port: 50333}, {ip: "43.224.8.116", port: 6666}, {ip: "43.224.8.124", port: 6666}, {ip: "43.224.8.86", port: 6666}, {ip: "43.225.20.73", port: 8080}, {ip: "43.225.23.26", port: 8080}, {ip: "43.230.196.98", port: 36569}, {ip: "43.240.5.225", port: 31777}, {ip: "43.241.28.248", port: 8080}, {ip: "43.242.209.201", port: 8080}, {ip: "43.246.139.82", port: 8080}, {ip: "43.248.73.86", port: 53281}, {ip: "43.251.170.145", port: 54059}, {ip: "45.112.57.230", port: 61222}, {ip: "45.115.171.30", port: 47949}, {ip: "45.121.29.254", port: 54858}, {ip: "45.123.26.146", port: 53281}, {ip: "45.125.61.193", port: 32804}, {ip: "45.125.61.209", port: 32804}, {ip: "45.127.121.194", port: 53281}, {ip: "45.250.226.14", port: 3128}, {ip: "45.250.226.38", port: 8080}, {ip: "45.250.226.47", port: 8080}, {ip: "45.250.226.55", port: 8080}, {ip: "49.249.251.86", port: 53281}],
- "CN" => [{ip: "182.61.170.45", port: 3128}],
- "RU" => [{ip: "109.106.139.225", port: 45689}, {ip: "109.161.48.228", port: 53281}, {ip: "109.167.224.198", port: 51919}, {ip: "109.172.57.250", port: 23500}, {ip: "109.194.2.126", port: 61822}, {ip: "109.195.150.128", port: 37564}, {ip: "109.201.96.171", port: 31773}, {ip: "109.201.97.204", port: 41258}, {ip: "109.201.97.235", port: 39125}, {ip: "109.206.140.74", port: 45991}, {ip: "109.206.148.31", port: 30797}, {ip: "109.69.75.5", port: 46347}, {ip: "109.71.181.170", port: 53983}, {ip: "109.74.132.190", port: 42663}, {ip: "109.74.143.45", port: 36529}, {ip: "109.75.140.158", port: 59916}, {ip: "109.95.84.114", port: 52125}, {ip: "130.255.12.24", port: 31004}, {ip: "134.19.147.72", port: 44812}, {ip: "134.90.181.7", port: 54353}, {ip: "145.255.6.171", port: 31252}, {ip: "146.120.227.3", port: 8080}, {ip: "149.255.112.194", port: 48968}, {ip: "158.46.127.222", port: 52574}, {ip: "158.46.43.144", port: 39120}, {ip: "158.58.130.185", port: 50016}, {ip: "158.58.132.12", port: 56962}, {ip: "158.58.133.106", port: 41258}, {ip: "158.58.133.13", port: 21213}, {ip: "176.101.0.47", port: 34471}, {ip: "176.101.89.226", port: 33470}, {ip: "176.106.12.65", port: 30120}, {ip: "176.107.80.110", port: 58901}, {ip: "176.110.121.9", port: 46322}, {ip: "176.110.121.90", port: 21776}, {ip: "176.111.97.18", port: 8080}, {ip: "176.112.106.230", port: 33996}, {ip: "176.112.110.40", port: 61142}, {ip: "176.113.116.70", port: 55589}, {ip: "176.113.27.192", port: 47337}, {ip: "176.115.197.118", port: 8080}, {ip: "176.117.255.182", port: 53100}, {ip: "176.120.200.69", port: 44331}, {ip: "176.124.123.93", port: 41258}, {ip: "176.192.124.98", port: 60787}, {ip: "176.192.5.238", port: 61227}, {ip: "176.192.8.206", port: 39422}, {ip: "176.193.15.94", port: 8080}, {ip: "176.196.195.170", port: 48129}, {ip: "176.196.198.154", port: 35252}, {ip: "176.196.238.234", port: 44648}, {ip: "176.196.239.46", port: 35656}, {ip: "176.196.246.6", port: 53281}, {ip: "176.196.84.138", port: 51336}, {ip: "176.197.145.246", port: 32649}, {ip: "176.197.99.142", port: 47278}, {ip: "176.215.1.108", port: 60339}, {ip: "176.215.170.147", port: 35604}, {ip: "176.56.23.14", port: 35340}, {ip: "176.62.185.54", port: 53883}, {ip: "176.74.13.110", port: 8080}, {ip: "178.130.29.226", port: 53295}, {ip: "178.170.254.178", port: 46788}, {ip: "178.213.13.136", port: 53281}, {ip: "178.218.104.8", port: 49707}, {ip: "178.219.183.163", port: 8080}, {ip: "178.237.180.34", port: 57307}, {ip: "178.57.101.212", port: 38020}, {ip: "178.57.101.235", port: 31309}, {ip: "178.64.190.133", port: 46688}, {ip: "178.75.1.111", port: 50411}, {ip: "178.75.27.131", port: 41879}, {ip: "185.13.35.178", port: 40654}, {ip: "185.15.189.67", port: 30215}, {ip: "185.175.119.137", port: 41258}, {ip: "185.18.111.194", port: 41258}, {ip: "185.19.176.237", port: 53281}, {ip: "185.190.40.115", port: 31747}, {ip: "185.216.195.134", port: 61287}, {ip: "185.22.172.94", port: 10010}, {ip: "185.22.172.94", port: 1448}, {ip: "185.22.174.65", port: 10010}, {ip: "185.22.174.65", port: 1448}, {ip: "185.23.64.100", port: 3130}, {ip: "185.23.82.39", port: 59248}, {ip: "185.233.94.105", port: 59288}, {ip: "185.233.94.146", port: 57736}, {ip: "185.3.68.54", port: 53500}, {ip: "185.32.120.177", port: 60724}, {ip: "185.34.20.164", port: 53700}, {ip: "185.34.23.43", port: 63238}, {ip: "185.51.60.141", port: 39935}, {ip: "185.61.92.228", port: 33060}, {ip: "185.61.93.67", port: 49107}, {ip: "185.7.233.66", port: 53504}, {ip: "185.72.225.10", port: 56285}, {ip: "185.75.5.158", port: 60819}, {ip: 
"185.9.86.186", port: 39345}, {ip: "188.133.136.10", port: 47113}, {ip: "188.168.75.254", port: 56899}, {ip: "188.170.41.6", port: 60332}, {ip: "188.187.189.142", port: 38264}, {ip: "188.234.151.103", port: 8080}, {ip: "188.235.11.88", port: 57143}, {ip: "188.235.137.196", port: 23500}, {ip: "188.244.175.2", port: 8080}, {ip: "188.255.82.136", port: 53281}, {ip: "188.43.4.117", port: 60577}, {ip: "188.68.95.166", port: 41258}, {ip: "188.92.242.180", port: 52048}, {ip: "188.93.242.213", port: 49774}, {ip: "192.162.193.243", port: 36910}, {ip: "192.162.214.11", port: 41258}, {ip: "193.106.170.133", port: 38591}, {ip: "193.232.113.244", port: 40412}, {ip: "193.232.234.130", port: 61932}, {ip: "193.242.177.105", port: 53281}, {ip: "193.242.178.50", port: 52376}, {ip: "193.242.178.90", port: 8080}, {ip: "193.33.101.152", port: 34611}, {ip: "194.114.128.149", port: 61213}, {ip: "194.135.15.146", port: 59328}, {ip: "194.135.216.178", port: 56805}, {ip: "194.135.75.74", port: 41258}, {ip: "194.146.201.67", port: 53281}, {ip: "194.186.18.46", port: 56408}, {ip: "194.186.20.62", port: 21231}, {ip: "194.190.171.214", port: 43960}, {ip: "194.9.27.82", port: 42720}, {ip: "195.133.232.58", port: 41733}, {ip: "195.14.114.116", port: 59530}, {ip: "195.14.114.24", port: 56897}, {ip: "195.158.250.97", port: 41582}, {ip: "195.16.48.142", port: 36083}, {ip: "195.191.183.169", port: 47238}, {ip: "195.206.45.112", port: 53281}, {ip: "195.208.172.70", port: 8080}, {ip: "195.209.141.67", port: 31927}, {ip: "195.209.176.2", port: 8080}, {ip: "195.210.144.166", port: 30088}, {ip: "195.211.160.88", port: 44464}, {ip: "195.218.144.182", port: 31705}, {ip: "195.46.168.147", port: 8080}, {ip: "195.9.188.78", port: 53281}, {ip: "195.9.209.10", port: 35242}, {ip: "195.9.223.246", port: 52098}, {ip: "195.9.237.66", port: 8080}, {ip: "195.9.91.66", port: 33199}, {ip: "195.91.132.20", port: 19600}, {ip: "195.98.183.82", port: 30953}, {ip: "212.104.82.246", port: 36495}, {ip: "212.119.229.18", port: 33852}, {ip: "212.13.97.122", port: 30466}, {ip: "212.19.21.19", port: 53264}, {ip: "212.19.5.157", port: 58442}, {ip: "212.19.8.223", port: 30281}, {ip: "212.19.8.239", port: 55602}, {ip: "212.192.202.207", port: 4550}, {ip: "212.22.80.224", port: 34822}, {ip: "212.26.247.178", port: 38418}, {ip: "212.33.228.161", port: 37971}, {ip: "212.33.243.83", port: 38605}, {ip: "212.34.53.126", port: 44369}, {ip: "212.5.107.81", port: 56481}, {ip: "212.7.230.7", port: 51405}, {ip: "212.77.138.161", port: 41258}, {ip: "213.108.221.201", port: 32800}, {ip: "213.109.7.135", port: 59918}, {ip: "213.128.9.204", port: 35549}, {ip: "213.134.196.12", port: 38723}, {ip: "213.168.37.86", port: 8080}, {ip: "213.187.118.184", port: 53281}, {ip: "213.21.23.98", port: 53281}, {ip: "213.210.67.166", port: 53281}, {ip: "213.234.0.242", port: 56503}, {ip: "213.247.192.131", port: 41258}, {ip: "213.251.226.208", port: 56900}, {ip: "213.33.155.80", port: 44387}, {ip: "213.33.199.194", port: 36411}, {ip: "213.33.224.82", port: 8080}, {ip: "213.59.153.19", port: 53281}, {ip: "217.10.45.103", port: 8080}, {ip: "217.107.197.39", port: 33628}, {ip: "217.116.60.66", port: 21231}, {ip: "217.195.87.58", port: 41258}, {ip: "217.197.239.54", port: 34463}, {ip: "217.74.161.42", port: 34175}, {ip: "217.8.84.76", port: 46378}, {ip: "31.131.67.14", port: 8080}, {ip: "31.132.127.142", port: 35432}, {ip: "31.132.218.252", port: 32423}, {ip: "31.173.17.118", port: 51317}, {ip: "31.193.124.70", port: 53281}, {ip: "31.210.211.147", port: 8080}, {ip: "31.220.183.217", port: 
53281}, {ip: "31.29.212.82", port: 35066}, {ip: "31.42.254.24", port: 30912}, {ip: "31.47.189.14", port: 38473}, {ip: "37.113.129.98", port: 41665}, {ip: "37.192.103.164", port: 34835}, {ip: "37.192.194.50", port: 50165}, {ip: "37.192.99.151", port: 51417}, {ip: "37.205.83.91", port: 35888}, {ip: "37.233.85.155", port: 53281}, {ip: "37.235.167.66", port: 53281}, {ip: "37.235.65.2", port: 47816}, {ip: "37.235.67.178", port: 34450}, {ip: "37.9.134.133", port: 41262}, {ip: "46.150.174.90", port: 53281}, {ip: "46.151.156.198", port: 56013}, {ip: "46.16.226.10", port: 8080}, {ip: "46.163.131.55", port: 48306}, {ip: "46.173.191.51", port: 53281}, {ip: "46.174.222.61", port: 34977}, {ip: "46.180.96.79", port: 42319}, {ip: "46.181.151.79", port: 39386}, {ip: "46.21.74.130", port: 8080}, {ip: "46.227.162.98", port: 51558}, {ip: "46.229.187.169", port: 53281}, {ip: "46.229.67.198", port: 47437}, {ip: "46.243.179.221", port: 41598}, {ip: "46.254.217.54", port: 53281}, {ip: "46.32.68.188", port: 39707}, {ip: "46.39.224.112", port: 36765}, {ip: "46.63.162.171", port: 8080}, {ip: "46.73.33.253", port: 8080}, {ip: "5.128.32.12", port: 51959}, {ip: "5.129.155.3", port: 51390}, {ip: "5.129.16.27", port: 48935}, {ip: "5.141.81.65", port: 61853}, {ip: "5.16.15.234", port: 8080}, {ip: "5.167.51.235", port: 8080}, {ip: "5.167.96.238", port: 3128}, {ip: "5.19.165.235", port: 30793}, {ip: "5.35.93.157", port: 31773}, {ip: "5.59.137.90", port: 8888}, {ip: "5.8.207.160", port: 57192}, {ip: "62.122.97.66", port: 59143}, {ip: "62.148.151.253", port: 53570}, {ip: "62.152.85.158", port: 31156}, {ip: "62.165.54.153", port: 55522}, {ip: "62.173.140.14", port: 8080}, {ip: "62.173.155.206", port: 41258}, {ip: "62.182.206.19", port: 37715}, {ip: "62.213.14.166", port: 8080}, {ip: "62.76.123.224", port: 8080}, {ip: "77.221.220.133", port: 44331}, {ip: "77.232.153.248", port: 60950}, {ip: "77.233.10.37", port: 54210}, {ip: "77.244.27.109", port: 47554}, {ip: "77.37.142.203", port: 53281}, {ip: "77.39.29.29", port: 49243}, {ip: "77.75.6.34", port: 8080}, {ip: "77.87.102.7", port: 42601}, {ip: "77.94.121.212", port: 36896}, {ip: "77.94.121.51", port: 45293}, {ip: "78.110.154.177", port: 59888}, {ip: "78.140.201.226", port: 8090}, {ip: "78.153.4.122", port: 9001}, {ip: "78.156.225.170", port: 41258}, {ip: "78.156.243.146", port: 59730}, {ip: "78.29.14.201", port: 39001}, {ip: "78.81.24.112", port: 8080}, {ip: "78.85.36.203", port: 8080}, {ip: "79.104.219.125", port: 3128}, {ip: "79.104.55.134", port: 8080}, {ip: "79.137.181.170", port: 8080}, {ip: "79.173.124.194", port: 47832}, {ip: "79.173.124.207", port: 53281}, {ip: "79.174.186.168", port: 45710}, {ip: "79.175.51.13", port: 54853}, {ip: "79.175.57.77", port: 55477}, {ip: "80.234.107.118", port: 56952}, {ip: "80.237.6.1", port: 34880}, {ip: "80.243.14.182", port: 49320}, {ip: "80.251.48.215", port: 45157}, {ip: "80.254.121.66", port: 41055}, {ip: "80.254.125.236", port: 80}, {ip: "80.72.121.185", port: 52379}, {ip: "80.89.133.210", port: 3128}, {ip: "80.91.17.113", port: 41258}, {ip: "81.162.61.166", port: 40392}, {ip: "81.163.57.121", port: 41258}, {ip: "81.163.57.46", port: 41258}, {ip: "81.163.62.136", port: 41258}, {ip: "81.23.112.98", port: 55269}, {ip: "81.23.118.106", port: 60427}, {ip: "81.23.177.245", port: 8080}, {ip: "81.24.126.166", port: 8080}, {ip: "81.30.216.147", port: 41258}, {ip: "81.95.131.10", port: 44292}, {ip: "82.114.125.22", port: 8080}, {ip: "82.151.208.20", port: 8080}, {ip: "83.221.216.110", port: 47326}, {ip: "83.246.139.24", port: 8080}, {ip: 
"83.97.108.8", port: 41258}, {ip: "84.22.154.76", port: 8080}, {ip: "84.52.110.36", port: 38674}, {ip: "84.52.74.194", port: 8080}, {ip: "84.52.77.227", port: 41806}, {ip: "84.52.79.166", port: 43548}, {ip: "84.52.84.157", port: 44331}, {ip: "84.52.88.125", port: 32666}, {ip: "85.113.48.148", port: 8080}, {ip: "85.113.49.220", port: 8080}, {ip: "85.12.193.210", port: 58470}, {ip: "85.15.179.5", port: 8080}, {ip: "85.173.244.102", port: 53281}, {ip: "85.174.227.52", port: 59280}, {ip: "85.192.184.133", port: 8080}, {ip: "85.192.184.133", port: 80}, {ip: "85.21.240.193", port: 55820}, {ip: "85.21.63.219", port: 53281}, {ip: "85.235.190.18", port: 42494}, {ip: "85.237.56.193", port: 8080}, {ip: "85.91.119.6", port: 8080}, {ip: "86.102.116.30", port: 8080}, {ip: "86.110.30.146", port: 38109}, {ip: "87.117.3.129", port: 3128}, {ip: "87.225.108.195", port: 8080}, {ip: "87.228.103.111", port: 8080}, {ip: "87.228.103.43", port: 8080}, {ip: "87.229.143.10", port: 48872}, {ip: "87.249.205.103", port: 8080}, {ip: "87.249.21.193", port: 43079}, {ip: "87.255.13.217", port: 8080}, {ip: "88.147.159.167", port: 53281}, {ip: "88.200.225.32", port: 38583}, {ip: "88.204.59.177", port: 32666}, {ip: "88.84.209.69", port: 30819}, {ip: "88.87.72.72", port: 8080}, {ip: "88.87.79.20", port: 8080}, {ip: "88.87.91.163", port: 48513}, {ip: "88.87.93.20", port: 33277}, {ip: "89.109.12.82", port: 47972}, {ip: "89.109.21.43", port: 9090}, {ip: "89.109.239.183", port: 41041}, {ip: "89.109.54.137", port: 36469}, {ip: "89.17.37.218", port: 52957}, {ip: "89.189.130.103", port: 32626}, {ip: "89.189.159.214", port: 42530}, {ip: "89.189.174.121", port: 52636}, {ip: "89.23.18.29", port: 53281}, {ip: "89.249.251.21", port: 3128}, {ip: "89.250.149.114", port: 60981}, {ip: "89.250.17.209", port: 8080}, {ip: "89.250.19.173", port: 8080}, {ip: "90.150.87.172", port: 81}, {ip: "90.154.125.173", port: 33078}, {ip: "90.188.38.81", port: 60585}, {ip: "90.189.151.183", port: 32601}, {ip: "91.103.208.114", port: 57063}, {ip: "91.122.100.222", port: 44331}, {ip: "91.122.207.229", port: 8080}, {ip: "91.144.139.93", port: 3128}, {ip: "91.144.142.19", port: 44617}, {ip: "91.146.16.54", port: 57902}, {ip: "91.190.116.194", port: 38783}, {ip: "91.190.80.100", port: 31659}, {ip: "91.190.85.97", port: 34286}, {ip: "91.203.36.188", port: 8080}, {ip: "91.205.131.102", port: 8080}, {ip: "91.205.146.25", port: 37501}, {ip: "91.210.94.212", port: 52635}, {ip: "91.213.23.110", port: 8080}, {ip: "91.215.22.51", port: 53305}, {ip: "91.217.42.3", port: 8080}, {ip: "91.217.42.4", port: 8080}, {ip: "91.220.135.146", port: 41258}, {ip: "91.222.167.213", port: 38057}, {ip: "91.226.140.71", port: 33199}, {ip: "91.235.7.216", port: 59067}, {ip: "92.124.195.22", port: 3128}, {ip: "92.126.193.180", port: 8080}, {ip: "92.241.110.223", port: 53281}, {ip: "92.252.240.1", port: 53281}, {ip: "92.255.164.187", port: 3128}, {ip: "92.255.195.57", port: 53281}, {ip: "92.255.229.146", port: 55785}, {ip: "92.255.5.2", port: 41012}, {ip: "92.38.32.36", port: 56113}, {ip: "92.39.138.98", port: 31150}, {ip: "92.51.16.155", port: 46202}, {ip: "92.55.59.63", port: 33030}, {ip: "93.170.112.200", port: 47995}, {ip: "93.183.86.185", port: 53281}, {ip: "93.188.45.157", port: 8080}, {ip: "93.81.246.5", port: 53281}, {ip: "93.91.112.247", port: 41258}, {ip: "94.127.217.66", port: 40115}, {ip: "94.154.85.214", port: 8080}, {ip: "94.180.106.94", port: 32767}, {ip: "94.180.249.187", port: 38051}, {ip: "94.230.243.6", port: 8080}, {ip: "94.232.57.231", port: 51064}, {ip: "94.24.244.170", 
port: 48936}, {ip: "94.242.55.108", port: 10010}, {ip: "94.242.55.108", port: 1448}, {ip: "94.242.57.136", port: 10010}, {ip: "94.242.57.136", port: 1448}, {ip: "94.242.58.108", port: 10010}, {ip: "94.242.58.108", port: 1448}, {ip: "94.242.58.14", port: 10010}, {ip: "94.242.58.14", port: 1448}, {ip: "94.242.58.142", port: 10010}, {ip: "94.242.58.142", port: 1448}, {ip: "94.242.59.245", port: 10010}, {ip: "94.242.59.245", port: 1448}, {ip: "94.247.241.70", port: 53640}, {ip: "94.247.62.165", port: 33176}, {ip: "94.253.13.228", port: 54935}, {ip: "94.253.14.187", port: 55045}, {ip: "94.28.94.154", port: 46966}, {ip: "94.73.217.125", port: 40858}, {ip: "95.140.19.9", port: 8080}, {ip: "95.140.20.94", port: 33994}, {ip: "95.154.137.66", port: 41258}, {ip: "95.154.159.119", port: 44242}, {ip: "95.154.82.254", port: 52484}, {ip: "95.161.157.227", port: 43170}, {ip: "95.161.182.146", port: 33877}, {ip: "95.161.189.26", port: 61522}, {ip: "95.165.163.146", port: 8888}, {ip: "95.165.172.90", port: 60496}, {ip: "95.165.182.18", port: 38950}, {ip: "95.165.203.222", port: 33805}, {ip: "95.165.244.122", port: 58162}, {ip: "95.167.123.54", port: 58664}, {ip: "95.167.241.242", port: 49636}, {ip: "95.171.1.92", port: 35956}, {ip: "95.172.52.230", port: 35989}, {ip: "95.181.35.30", port: 40804}, {ip: "95.181.56.178", port: 39144}, {ip: "95.181.75.228", port: 53281}, {ip: "95.188.74.194", port: 57122}, {ip: "95.189.112.214", port: 35508}, {ip: "95.31.10.247", port: 30711}, {ip: "95.31.197.77", port: 41651}, {ip: "95.31.2.199", port: 33632}, {ip: "95.71.125.50", port: 49882}, {ip: "95.73.62.13", port: 32185}, {ip: "95.79.36.55", port: 44861}, {ip: "95.79.55.196", port: 53281}, {ip: "95.79.99.148", port: 3128}, {ip: "95.80.65.39", port: 43555}, {ip: "95.80.93.44", port: 41258}, {ip: "95.80.98.41", port: 8080}, {ip: "95.83.156.250", port: 58438}, {ip: "95.84.128.25", port: 33765}, {ip: "95.84.154.73", port: 57423}],
- "CA" => [{ip: "144.217.161.149", port: 8080}, {ip: "24.37.9.6", port: 54154}, {ip: "54.39.138.144", port: 3128}, {ip: "54.39.138.145", port: 3128}, {ip: "54.39.138.151", port: 3128}, {ip: "54.39.138.152", port: 3128}, {ip: "54.39.138.153", port: 3128}, {ip: "54.39.138.154", port: 3128}, {ip: "54.39.138.155", port: 3128}, {ip: "54.39.138.156", port: 3128}, {ip: "54.39.138.157", port: 3128}, {ip: "54.39.53.104", port: 3128}, {ip: "66.70.167.113", port: 3128}, {ip: "66.70.167.116", port: 3128}, {ip: "66.70.167.117", port: 3128}, {ip: "66.70.167.119", port: 3128}, {ip: "66.70.167.120", port: 3128}, {ip: "66.70.167.125", port: 3128}, {ip: "66.70.188.148", port: 3128}, {ip: "70.35.213.229", port: 36127}, {ip: "70.65.233.174", port: 8080}, {ip: "72.139.24.66", port: 38861}, {ip: "74.15.191.160", port: 41564}],
- "JP" => [{ip: "47.91.20.67", port: 8080}, {ip: "61.118.35.94", port: 55725}],
- "IT" => [{ip: "109.70.201.97", port: 53517}, {ip: "176.31.82.212", port: 8080}, {ip: "185.132.228.118", port: 55583}, {ip: "185.49.58.88", port: 56006}, {ip: "185.94.89.179", port: 41258}, {ip: "213.203.134.10", port: 41258}, {ip: "217.61.172.12", port: 41369}, {ip: "46.232.143.126", port: 41258}, {ip: "46.232.143.253", port: 41258}, {ip: "93.67.154.125", port: 8080}, {ip: "93.67.154.125", port: 80}, {ip: "95.169.95.242", port: 53803}],
- "TH" => [{ip: "1.10.184.166", port: 57330}, {ip: "1.10.186.100", port: 55011}, {ip: "1.10.186.209", port: 32431}, {ip: "1.10.186.245", port: 34360}, {ip: "1.10.186.93", port: 53711}, {ip: "1.10.187.118", port: 62000}, {ip: "1.10.187.34", port: 51635}, {ip: "1.10.187.43", port: 38715}, {ip: "1.10.188.181", port: 51093}, {ip: "1.10.188.83", port: 31940}, {ip: "1.10.188.95", port: 30593}, {ip: "1.10.189.58", port: 48564}, {ip: "1.179.157.237", port: 46178}, {ip: "1.179.164.213", port: 8080}, {ip: "1.179.198.37", port: 8080}, {ip: "1.20.100.99", port: 53794}, {ip: "1.20.101.221", port: 55707}, {ip: "1.20.101.254", port: 35394}, {ip: "1.20.101.80", port: 36234}, {ip: "1.20.102.133", port: 40296}, {ip: "1.20.103.13", port: 40544}, {ip: "1.20.103.56", port: 55422}, {ip: "1.20.96.234", port: 53142}, {ip: "1.20.97.54", port: 60122}, {ip: "1.20.99.63", port: 32123}, {ip: "101.108.92.20", port: 8080}, {ip: "101.109.143.71", port: 36127}, {ip: "101.51.141.110", port: 42860}, {ip: "101.51.141.60", port: 60417}, {ip: "103.246.17.237", port: 3128}, {ip: "110.164.73.131", port: 8080}, {ip: "110.164.87.80", port: 35844}, {ip: "110.77.134.106", port: 8080}, {ip: "113.53.29.92", port: 47297}, {ip: "113.53.83.192", port: 32780}, {ip: "113.53.83.195", port: 35686}, {ip: "113.53.91.214", port: 8080}, {ip: "115.87.27.0", port: 53276}, {ip: "118.172.211.3", port: 58535}, {ip: "118.172.211.40", port: 30430}, {ip: "118.174.196.174", port: 23500}, {ip: "118.174.196.203", port: 23500}, {ip: "118.174.220.107", port: 41222}, {ip: "118.174.220.110", port: 39025}, {ip: "118.174.220.115", port: 41011}, {ip: "118.174.220.118", port: 59556}, {ip: "118.174.220.136", port: 55041}, {ip: "118.174.220.163", port: 31561}, {ip: "118.174.220.168", port: 47455}, {ip: "118.174.220.231", port: 40924}, {ip: "118.174.220.238", port: 46326}, {ip: "118.174.234.13", port: 53084}, {ip: "118.174.234.26", port: 41926}, {ip: "118.174.234.32", port: 57403}, {ip: "118.174.234.59", port: 59149}, {ip: "118.174.234.68", port: 42626}, {ip: "118.174.234.83", port: 38006}, {ip: "118.175.207.104", port: 38959}, {ip: "118.175.244.111", port: 8080}, {ip: "118.175.93.207", port: 50738}, {ip: "122.154.38.53", port: 8080}, {ip: "122.154.59.6", port: 8080}, {ip: "122.154.72.102", port: 8080}, {ip: "122.155.222.98", port: 3128}, {ip: "124.121.22.121", port: 61699}, {ip: "125.24.156.16", port: 44321}, {ip: "125.25.165.105", port: 33850}, {ip: "125.25.165.111", port: 40808}, {ip: "125.25.165.42", port: 47221}, {ip: "125.25.201.14", port: 30100}, {ip: "125.26.99.135", port: 55637}, {ip: "125.26.99.141", port: 38537}, {ip: "125.26.99.148", port: 31818}, {ip: "134.236.247.137", port: 8080}, {ip: "159.192.98.224", port: 3128}, {ip: "171.100.2.154", port: 8080}, {ip: "171.100.9.126", port: 49163}, {ip: "180.180.156.116", port: 48431}, {ip: "180.180.156.46", port: 48507}, {ip: "180.180.156.87", port: 36628}, {ip: "180.180.218.204", port: 51565}, {ip: "180.180.8.34", port: 8080}, {ip: "182.52.238.125", port: 58861}, {ip: "182.52.74.73", port: 36286}, {ip: "182.52.74.76", port: 34084}, {ip: "182.52.74.77", port: 34825}, {ip: "182.52.74.78", port: 48708}, {ip: "182.52.90.45", port: 53799}, {ip: "182.53.206.155", port: 34307}, {ip: "182.53.206.43", port: 45330}, {ip: "182.53.206.49", port: 54228}, {ip: "183.88.212.141", port: 8080}, {ip: "183.88.212.184", port: 8080}, {ip: "183.88.213.85", port: 8080}, {ip: "183.88.214.47", port: 8080}, {ip: "184.82.128.211", port: 8080}, {ip: "202.183.201.13", port: 8081}, {ip: "202.29.20.151", port: 43083}, {ip: "203.150.172.151", 
port: 8080}, {ip: "27.131.157.94", port: 8080}, {ip: "27.145.100.22", port: 8080}, {ip: "27.145.100.243", port: 8080}, {ip: "49.231.196.114", port: 53281}, {ip: "58.97.72.83", port: 8080}, {ip: "61.19.145.66", port: 8080}],
- "ES" => [{ip: "185.198.184.14", port: 48122}, {ip: "185.26.226.241", port: 36012}, {ip: "194.224.188.82", port: 3128}, {ip: "195.235.68.61", port: 3128}, {ip: "195.53.237.122", port: 3128}, {ip: "195.53.86.82", port: 3128}, {ip: "213.96.245.47", port: 8080}, {ip: "217.125.71.214", port: 33950}, {ip: "62.14.178.72", port: 53281}, {ip: "80.35.254.42", port: 53281}, {ip: "81.33.4.214", port: 61711}, {ip: "83.175.238.170", port: 53281}, {ip: "85.217.137.77", port: 3128}, {ip: "90.170.205.178", port: 33680}, {ip: "93.156.177.91", port: 53281}, {ip: "95.60.152.139", port: 37995}],
- "AE" => [{ip: "178.32.5.90", port: 36159}],
- "KR" => [{ip: "112.217.219.179", port: 3128}, {ip: "114.141.229.2", port: 58115}, {ip: "121.139.218.165", port: 31409}, {ip: "122.49.112.2", port: 38592}, {ip: "61.42.18.132", port: 53281}],
- "BR" => [{ip: "128.201.97.157", port: 53281}, {ip: "128.201.97.158", port: 53281}, {ip: "131.0.246.157", port: 35252}, {ip: "131.161.26.90", port: 8080}, {ip: "131.72.143.100", port: 41396}, {ip: "138.0.24.66", port: 53281}, {ip: "138.121.130.50", port: 50600}, {ip: "138.121.155.127", port: 61932}, {ip: "138.121.32.133", port: 23492}, {ip: "138.185.176.63", port: 53281}, {ip: "138.204.233.190", port: 53281}, {ip: "138.204.233.242", port: 53281}, {ip: "138.219.71.74", port: 52688}, {ip: "138.36.107.24", port: 41184}, {ip: "138.94.115.166", port: 8080}, {ip: "143.0.188.161", port: 53281}, {ip: "143.202.218.135", port: 8080}, {ip: "143.208.2.42", port: 53281}, {ip: "143.208.79.223", port: 8080}, {ip: "143.255.52.102", port: 40687}, {ip: "143.255.52.116", port: 57856}, {ip: "143.255.52.117", port: 37279}, {ip: "144.217.22.128", port: 8080}, {ip: "168.0.8.225", port: 8080}, {ip: "168.0.8.55", port: 8080}, {ip: "168.121.139.54", port: 40056}, {ip: "168.181.168.23", port: 53281}, {ip: "168.181.170.198", port: 31935}, {ip: "168.232.198.25", port: 32009}, {ip: "168.232.198.35", port: 42267}, {ip: "168.232.207.145", port: 46342}, {ip: "170.0.104.107", port: 60337}, {ip: "170.0.112.2", port: 50359}, {ip: "170.0.112.229", port: 50359}, {ip: "170.238.118.107", port: 34314}, {ip: "170.239.144.9", port: 3128}, {ip: "170.247.29.138", port: 8080}, {ip: "170.81.237.36", port: 37124}, {ip: "170.84.51.74", port: 53281}, {ip: "170.84.60.222", port: 42981}, {ip: "177.10.202.67", port: 8080}, {ip: "177.101.60.86", port: 80}, {ip: "177.103.231.211", port: 55091}, {ip: "177.12.80.50", port: 50556}, {ip: "177.131.13.9", port: 20183}, {ip: "177.135.178.115", port: 42510}, {ip: "177.135.248.75", port: 20183}, {ip: "177.184.206.238", port: 39508}, {ip: "177.185.148.46", port: 58623}, {ip: "177.200.83.238", port: 8080}, {ip: "177.21.24.146", port: 666}, {ip: "177.220.188.120", port: 47556}, {ip: "177.220.188.213", port: 8080}, {ip: "177.222.229.243", port: 23500}, {ip: "177.234.161.42", port: 8080}, {ip: "177.36.11.241", port: 3128}, {ip: "177.36.12.193", port: 23500}, {ip: "177.37.199.175", port: 49608}, {ip: "177.39.187.70", port: 37315}, {ip: "177.44.175.199", port: 8080}, {ip: "177.46.148.126", port: 3128}, {ip: "177.46.148.142", port: 3128}, {ip: "177.47.194.98", port: 21231}, {ip: "177.5.98.58", port: 20183}, {ip: "177.52.55.19", port: 60901}, {ip: "177.54.200.66", port: 57526}, {ip: "177.55.255.74", port: 37147}, {ip: "177.67.217.94", port: 53281}, {ip: "177.73.248.6", port: 54381}, {ip: "177.73.4.234", port: 23500}, {ip: "177.75.143.211", port: 35955}, {ip: "177.75.161.206", port: 3128}, {ip: "177.75.86.49", port: 20183}, {ip: "177.8.216.106", port: 8080}, {ip: "177.8.216.114", port: 8080}, {ip: "177.8.37.247", port: 56052}, {ip: "177.84.216.17", port: 50569}, {ip: "177.85.200.254", port: 53095}, {ip: "177.87.169.1", port: 53281}, {ip: "179.107.97.178", port: 3128}, {ip: "179.109.144.25", port: 8080}, {ip: "179.109.193.137", port: 53281}, {ip: "179.189.125.206", port: 8080}, {ip: "179.97.30.46", port: 53100}, {ip: "186.192.195.220", port: 38983}, {ip: "186.193.11.226", port: 48999}, {ip: "186.193.26.106", port: 3128}, {ip: "186.208.220.248", port: 3128}, {ip: "186.209.243.142", port: 3128}, {ip: "186.209.243.233", port: 3128}, {ip: "186.211.106.227", port: 34334}, {ip: "186.211.160.178", port: 36756}, {ip: "186.215.133.170", port: 20183}, {ip: "186.216.81.21", port: 31773}, {ip: "186.219.214.13", port: 32708}, {ip: "186.224.94.6", port: 48957}, {ip: "186.225.97.246", port: 43082}, {ip: "186.226.171.163", 
port: 48698}, {ip: "186.226.179.2", port: 56089}, {ip: "186.226.234.67", port: 33834}, {ip: "186.228.147.58", port: 20183}, {ip: "186.233.97.163", port: 8888}, {ip: "186.248.170.82", port: 53281}, {ip: "186.249.213.101", port: 53482}, {ip: "186.249.213.65", port: 52018}, {ip: "186.250.213.225", port: 60774}, {ip: "186.250.96.70", port: 8080}, {ip: "186.250.96.77", port: 8080}, {ip: "187.1.43.246", port: 53396}, {ip: "187.108.36.250", port: 20183}, {ip: "187.108.38.10", port: 20183}, {ip: "187.109.36.251", port: 20183}, {ip: "187.109.40.9", port: 20183}, {ip: "187.109.56.101", port: 20183}, {ip: "187.111.90.89", port: 53281}, {ip: "187.115.10.50", port: 20183}, {ip: "187.19.62.7", port: 59010}, {ip: "187.33.79.61", port: 33469}, {ip: "187.35.158.150", port: 38872}, {ip: "187.44.1.167", port: 8080}, {ip: "187.45.127.87", port: 20183}, {ip: "187.45.156.109", port: 8080}, {ip: "187.5.218.215", port: 20183}, {ip: "187.58.65.225", port: 3128}, {ip: "187.63.111.37", port: 3128}, {ip: "187.72.166.10", port: 8080}, {ip: "187.73.68.14", port: 53281}, {ip: "187.84.177.6", port: 45903}, {ip: "187.84.191.170", port: 43936}, {ip: "187.87.204.210", port: 45597}, {ip: "187.87.39.247", port: 31793}, {ip: "189.1.16.162", port: 23500}, {ip: "189.113.124.162", port: 8080}, {ip: "189.124.195.185", port: 37318}, {ip: "189.3.196.18", port: 61595}, {ip: "189.37.33.59", port: 35532}, {ip: "189.7.49.66", port: 42700}, {ip: "189.90.194.35", port: 30843}, {ip: "189.90.248.75", port: 8080}, {ip: "189.91.231.43", port: 3128}, {ip: "191.239.243.156", port: 3128}, {ip: "191.240.154.246", port: 23500}, {ip: "191.240.156.154", port: 36127}, {ip: "191.240.99.142", port: 9090}, {ip: "191.241.226.230", port: 53281}, {ip: "191.241.228.74", port: 20183}, {ip: "191.241.228.78", port: 20183}, {ip: "191.241.33.238", port: 39188}, {ip: "191.241.36.170", port: 8080}, {ip: "191.241.36.218", port: 3128}, {ip: "191.242.182.132", port: 8081}, {ip: "191.243.221.130", port: 3128}, {ip: "191.255.207.231", port: 20183}, {ip: "191.36.192.196", port: 3128}, {ip: "191.36.244.230", port: 51377}, {ip: "191.5.0.79", port: 53281}, {ip: "191.6.228.6", port: 53281}, {ip: "191.7.193.18", port: 38133}, {ip: "191.7.20.134", port: 3128}, {ip: "192.140.91.173", port: 20183}, {ip: "200.150.86.138", port: 44677}, {ip: "200.155.36.185", port: 3128}, {ip: "200.155.36.188", port: 3128}, {ip: "200.155.39.41", port: 3128}, {ip: "200.174.158.26", port: 34112}, {ip: "200.187.177.105", port: 20183}, {ip: "200.187.87.138", port: 20183}, {ip: "200.192.252.201", port: 8080}, {ip: "200.192.255.102", port: 8080}, {ip: "200.203.144.2", port: 50262}, {ip: "200.229.238.42", port: 20183}, {ip: "200.233.134.85", port: 43172}, {ip: "200.233.136.177", port: 20183}, {ip: "200.241.44.3", port: 20183}, {ip: "200.255.122.170", port: 8080}, {ip: "200.255.122.174", port: 8080}, {ip: "201.12.21.57", port: 8080}, {ip: "201.131.224.21", port: 56200}, {ip: "201.182.223.16", port: 37492}, {ip: "201.20.89.126", port: 8080}, {ip: "201.22.95.10", port: 8080}, {ip: "201.57.167.34", port: 8080}, {ip: "201.59.200.246", port: 80}, {ip: "201.6.167.178", port: 3128}, {ip: "201.90.36.194", port: 3128}, {ip: "45.226.20.6", port: 8080}, {ip: "45.234.139.129", port: 20183}, {ip: "45.234.200.18", port: 53281}, {ip: "45.235.87.4", port: 51996}, {ip: "45.6.136.38", port: 53281}, {ip: "45.6.80.131", port: 52080}, {ip: "45.6.93.10", port: 8080}, {ip: "45.71.108.162", port: 53281}],
- "PK" => [{ip: "103.18.243.154", port: 8080}, {ip: "110.36.218.126", port: 36651}, {ip: "110.36.234.210", port: 8080}, {ip: "110.39.162.74", port: 53281}, {ip: "110.39.174.58", port: 8080}, {ip: "111.68.108.34", port: 8080}, {ip: "125.209.116.182", port: 31653}, {ip: "125.209.78.21", port: 8080}, {ip: "125.209.82.78", port: 35087}, {ip: "180.92.156.150", port: 8080}, {ip: "202.142.158.114", port: 8080}, {ip: "202.147.173.10", port: 8080}, {ip: "202.147.173.10", port: 80}, {ip: "202.69.38.82", port: 8080}, {ip: "203.128.16.126", port: 59538}, {ip: "203.128.16.154", port: 33002}, {ip: "27.255.4.170", port: 8080}],
- "ID" => [{ip: "101.128.68.113", port: 8080}, {ip: "101.255.116.113", port: 53281}, {ip: "101.255.120.170", port: 6969}, {ip: "101.255.121.74", port: 8080}, {ip: "101.255.124.242", port: 8080}, {ip: "101.255.124.242", port: 80}, {ip: "101.255.56.138", port: 53560}, {ip: "103.10.171.132", port: 41043}, {ip: "103.10.81.172", port: 80}, {ip: "103.108.158.3", port: 48196}, {ip: "103.111.219.159", port: 53281}, {ip: "103.111.54.26", port: 49781}, {ip: "103.111.54.74", port: 8080}, {ip: "103.19.110.177", port: 8080}, {ip: "103.2.146.66", port: 49089}, {ip: "103.206.168.177", port: 53281}, {ip: "103.206.253.58", port: 49573}, {ip: "103.21.92.254", port: 33929}, {ip: "103.226.49.83", port: 23500}, {ip: "103.227.147.142", port: 37581}, {ip: "103.23.101.58", port: 8080}, {ip: "103.24.107.2", port: 8181}, {ip: "103.245.19.222", port: 53281}, {ip: "103.247.122.38", port: 8080}, {ip: "103.247.218.166", port: 3128}, {ip: "103.248.219.26", port: 53634}, {ip: "103.253.2.165", port: 33543}, {ip: "103.253.2.168", port: 51229}, {ip: "103.253.2.174", port: 30827}, {ip: "103.28.114.134", port: 8080}, {ip: "103.28.220.73", port: 53281}, {ip: "103.30.246.47", port: 3128}, {ip: "103.31.45.169", port: 57655}, {ip: "103.41.122.14", port: 53281}, {ip: "103.75.101.97", port: 8080}, {ip: "103.76.17.151", port: 23500}, {ip: "103.76.50.181", port: 8080}, {ip: "103.76.50.181", port: 80}, {ip: "103.76.50.182", port: 8080}, {ip: "103.78.74.170", port: 3128}, {ip: "103.78.80.194", port: 33442}, {ip: "103.8.122.5", port: 53297}, {ip: "103.80.236.107", port: 53281}, {ip: "103.80.238.203", port: 53281}, {ip: "103.86.140.74", port: 59538}, {ip: "103.94.122.254", port: 8080}, {ip: "103.94.125.244", port: 41508}, {ip: "103.94.169.19", port: 8080}, {ip: "103.94.7.254", port: 53281}, {ip: "106.0.51.50", port: 17385}, {ip: "110.93.13.202", port: 34881}, {ip: "112.78.37.6", port: 54791}, {ip: "114.199.110.58", port: 55898}, {ip: "114.199.112.170", port: 23500}, {ip: "114.199.123.194", port: 8080}, {ip: "114.57.33.162", port: 46935}, {ip: "114.57.33.214", port: 8080}, {ip: "114.6.197.254", port: 8080}, {ip: "114.7.15.146", port: 8080}, {ip: "114.7.162.254", port: 53281}, {ip: "115.124.75.226", port: 53990}, {ip: "115.124.75.228", port: 3128}, {ip: "117.102.78.42", port: 8080}, {ip: "117.102.93.251", port: 8080}, {ip: "117.102.94.186", port: 8080}, {ip: "117.102.94.186", port: 80}, {ip: "117.103.2.249", port: 58276}, {ip: "117.54.13.174", port: 34190}, {ip: "117.74.124.129", port: 8088}, {ip: "118.97.100.83", port: 35220}, {ip: "118.97.191.162", port: 80}, {ip: "118.97.191.203", port: 8080}, {ip: "118.97.36.18", port: 8080}, {ip: "118.97.73.85", port: 53281}, {ip: "118.99.105.226", port: 8080}, {ip: "119.252.168.53", port: 53281}, {ip: "122.248.45.35", port: 53281}, {ip: "122.50.6.186", port: 8080}, {ip: "122.50.6.186", port: 80}, {ip: "123.231.226.114", port: 47562}, {ip: "123.255.202.83", port: 32523}, {ip: "124.158.164.195", port: 8080}, {ip: "124.81.99.30", port: 3128}, {ip: "137.59.162.10", port: 3128}, {ip: "139.0.29.20", port: 59532}, {ip: "139.255.123.194", port: 4550}, {ip: "139.255.16.171", port: 31773}, {ip: "139.255.17.2", port: 47421}, {ip: "139.255.19.162", port: 42371}, {ip: "139.255.7.81", port: 53281}, {ip: "139.255.91.115", port: 8080}, {ip: "139.255.92.26", port: 53281}, {ip: "158.140.181.140", port: 54041}, {ip: "160.202.40.20", port: 55655}, {ip: "175.103.42.147", port: 8080}, {ip: "180.178.98.198", port: 8080}, {ip: "180.250.101.146", port: 8080}, {ip: "182.23.107.212", port: 3128}, {ip: "182.23.2.101", port: 
49833}, {ip: "182.23.7.226", port: 8080}, {ip: "182.253.209.203", port: 3128}, {ip: "183.91.66.210", port: 80}, {ip: "202.137.10.179", port: 57338}, {ip: "202.137.25.53", port: 3128}, {ip: "202.137.25.8", port: 8080}, {ip: "202.138.242.76", port: 4550}, {ip: "202.138.249.202", port: 43108}, {ip: "202.148.2.254", port: 8000}, {ip: "202.162.201.94", port: 53281}, {ip: "202.165.47.26", port: 8080}, {ip: "202.43.167.130", port: 8080}, {ip: "202.51.126.10", port: 53281}, {ip: "202.59.171.164", port: 58567}, {ip: "202.93.128.98", port: 3128}, {ip: "203.142.72.114", port: 808}, {ip: "203.153.117.65", port: 54144}, {ip: "203.189.89.1", port: 53281}, {ip: "203.77.239.18", port: 37002}, {ip: "203.99.123.25", port: 61502}, {ip: "220.247.168.163", port: 53281}, {ip: "220.247.173.154", port: 53281}, {ip: "220.247.174.206", port: 53445}, {ip: "222.124.131.211", port: 47343}, {ip: "222.124.173.146", port: 53281}, {ip: "222.124.2.131", port: 8080}, {ip: "222.124.2.186", port: 8080}, {ip: "222.124.215.187", port: 38913}, {ip: "222.124.221.179", port: 53281}, {ip: "223.25.101.242", port: 59504}, {ip: "223.25.97.62", port: 8080}, {ip: "223.25.99.38", port: 80}, {ip: "27.111.44.202", port: 80}, {ip: "27.111.47.3", port: 51144}, {ip: "36.37.124.234", port: 36179}, {ip: "36.37.124.235", port: 36179}, {ip: "36.37.81.135", port: 8080}, {ip: "36.37.89.98", port: 32323}, {ip: "36.66.217.179", port: 8080}, {ip: "36.66.98.6", port: 53281}, {ip: "36.67.143.183", port: 48746}, {ip: "36.67.206.187", port: 8080}, {ip: "36.67.32.87", port: 8080}, {ip: "36.67.93.220", port: 3128}, {ip: "36.67.93.220", port: 80}, {ip: "36.89.10.51", port: 34115}, {ip: "36.89.119.149", port: 8080}, {ip: "36.89.157.23", port: 37728}, {ip: "36.89.181.155", port: 60165}, {ip: "36.89.188.11", port: 39507}, {ip: "36.89.194.113", port: 37811}, {ip: "36.89.226.254", port: 8081}, {ip: "36.89.232.138", port: 23500}, {ip: "36.89.39.10", port: 3128}, {ip: "36.89.65.253", port: 60997}, {ip: "43.243.141.114", port: 8080}, {ip: "43.245.184.202", port: 41102}, {ip: "43.245.184.238", port: 80}, {ip: "66.96.233.225", port: 35053}, {ip: "66.96.237.253", port: 8080}],
- "BD" => [{ip: "103.103.88.91", port: 8080}, {ip: "103.106.119.154", port: 8080}, {ip: "103.106.236.1", port: 8080}, {ip: "103.106.236.41", port: 8080}, {ip: "103.108.144.139", port: 53281}, {ip: "103.109.57.218", port: 8080}, {ip: "103.109.58.242", port: 8080}, {ip: "103.112.129.106", port: 31094}, {ip: "103.112.129.82", port: 53281}, {ip: "103.114.10.177", port: 8080}, {ip: "103.114.10.250", port: 8080}, {ip: "103.15.245.26", port: 8080}, {ip: "103.195.204.73", port: 21776}, {ip: "103.197.49.106", port: 49688}, {ip: "103.198.168.29", port: 21776}, {ip: "103.214.200.6", port: 59008}, {ip: "103.218.25.161", port: 8080}, {ip: "103.218.25.41", port: 8080}, {ip: "103.218.26.204", port: 8080}, {ip: "103.218.27.221", port: 8080}, {ip: "103.231.229.90", port: 53281}, {ip: "103.239.252.233", port: 8080}, {ip: "103.239.252.50", port: 8080}, {ip: "103.239.253.193", port: 8080}, {ip: "103.250.68.193", port: 51370}, {ip: "103.5.232.146", port: 8080}, {ip: "103.73.224.53", port: 23500}, {ip: "103.9.134.73", port: 65301}, {ip: "113.11.47.242", port: 40071}, {ip: "113.11.5.67", port: 40071}, {ip: "114.31.5.34", port: 52606}, {ip: "115.127.51.226", port: 42764}, {ip: "115.127.64.62", port: 39611}, {ip: "115.127.91.106", port: 8080}, {ip: "119.40.85.198", port: 36899}, {ip: "123.200.29.110", port: 23500}, {ip: "123.49.51.42", port: 55124}, {ip: "163.47.36.90", port: 3128}, {ip: "180.211.134.158", port: 23500}, {ip: "180.211.193.74", port: 40536}, {ip: "180.92.238.226", port: 53451}, {ip: "182.160.104.213", port: 8080}, {ip: "202.191.126.58", port: 23500}, {ip: "202.4.126.170", port: 8080}, {ip: "202.5.37.241", port: 33623}, {ip: "202.5.57.5", port: 61729}, {ip: "202.79.17.65", port: 60122}, {ip: "203.188.248.52", port: 23500}, {ip: "27.147.146.78", port: 52220}, {ip: "27.147.164.10", port: 52344}, {ip: "27.147.212.38", port: 53281}, {ip: "27.147.217.154", port: 43252}, {ip: "27.147.219.102", port: 49464}, {ip: "43.239.74.137", port: 8080}, {ip: "43.240.103.252", port: 8080}, {ip: "45.125.223.57", port: 8080}, {ip: "45.125.223.81", port: 8080}, {ip: "45.251.228.122", port: 41418}, {ip: "45.64.132.137", port: 8080}, {ip: "45.64.132.137", port: 80}, {ip: "61.247.186.137", port: 8080}],
- "MX" => [{ip: "148.217.94.54", port: 3128}, {ip: "177.244.28.77", port: 53281}, {ip: "187.141.73.147", port: 53281}, {ip: "187.185.15.35", port: 53281}, {ip: "187.188.46.172", port: 53455}, {ip: "187.216.83.185", port: 8080}, {ip: "187.216.90.46", port: 53281}, {ip: "187.243.253.182", port: 33796}, {ip: "189.195.132.86", port: 43286}, {ip: "189.204.158.161", port: 8080}, {ip: "200.79.180.115", port: 8080}, {ip: "201.140.113.90", port: 37193}, {ip: "201.144.14.229", port: 53281}, {ip: "201.163.73.93", port: 53281}],
- "PH" => [{ip: "103.86.187.242", port: 23500}, {ip: "122.54.101.69", port: 8080}, {ip: "122.54.65.150", port: 8080}, {ip: "125.5.20.134", port: 53281}, {ip: "146.88.77.51", port: 8080}, {ip: "182.18.200.92", port: 8080}, {ip: "219.90.87.91", port: 53281}, {ip: "58.69.12.210", port: 8080}],
- "EG" => [{ip: "41.65.0.167", port: 8080}],
- "VN" => [{ip: "1.55.240.156", port: 53281}, {ip: "101.99.23.136", port: 3128}, {ip: "103.15.51.160", port: 8080}, {ip: "113.161.128.169", port: 60427}, {ip: "113.161.161.143", port: 57967}, {ip: "113.161.173.10", port: 3128}, {ip: "113.161.35.108", port: 30028}, {ip: "113.164.79.177", port: 46281}, {ip: "113.190.235.50", port: 34619}, {ip: "115.78.160.247", port: 8080}, {ip: "117.2.155.29", port: 47228}, {ip: "117.2.17.26", port: 53281}, {ip: "117.2.22.41", port: 41973}, {ip: "117.4.145.16", port: 51487}, {ip: "118.69.219.185", port: 55184}, {ip: "118.69.61.212", port: 53281}, {ip: "118.70.116.227", port: 61651}, {ip: "118.70.219.124", port: 53281}, {ip: "221.121.12.238", port: 36077}, {ip: "27.2.7.59", port: 52148}],
- "CD" => [{ip: "41.79.233.45", port: 8080}],
- "TR" => [{ip: "151.80.65.175", port: 3128}, {ip: "176.235.186.242", port: 37043}, {ip: "178.250.92.18", port: 8080}, {ip: "185.203.170.92", port: 8080}, {ip: "185.203.170.94", port: 8080}, {ip: "185.203.170.95", port: 8080}, {ip: "185.51.36.152", port: 41258}, {ip: "195.137.223.50", port: 41336}, {ip: "195.155.98.70", port: 52598}, {ip: "212.156.146.22", port: 40080}, {ip: "213.14.31.122", port: 44621}, {ip: "31.145.137.139", port: 31871}, {ip: "31.145.138.129", port: 31871}, {ip: "31.145.138.146", port: 34159}, {ip: "31.145.187.172", port: 30636}, {ip: "78.188.4.124", port: 34514}, {ip: "88.248.23.216", port: 36426}, {ip: "93.182.72.36", port: 8080}, {ip: "95.0.194.241", port: 9090}],
-}
diff --git a/src/invidious/helpers/serialized_yt_data.cr b/src/invidious/helpers/serialized_yt_data.cr
new file mode 100644
index 00000000..1fef5f93
--- /dev/null
+++ b/src/invidious/helpers/serialized_yt_data.cr
@@ -0,0 +1,317 @@
+@[Flags]
+enum VideoBadges
+ LiveNow
+ Premium
+ ThreeD
+ FourK
+ New
+ EightK
+ VR180
+ VR360
+ ClosedCaptions
+end
+
+struct SearchVideo
+ include DB::Serializable
+
+ property title : String
+ property id : String
+ property author : String
+ property ucid : String
+ property published : Time
+ property views : Int64
+ property description_html : String
+ property length_seconds : Int32
+ property premiere_timestamp : Time?
+ property author_verified : Bool
+ property badges : VideoBadges
+
+ def to_xml(auto_generated, query_params, xml : XML::Builder)
+ query_params["v"] = self.id
+
+ xml.element("entry") do
+ xml.element("id") { xml.text "yt:video:#{self.id}" }
+ xml.element("yt:videoId") { xml.text self.id }
+ xml.element("yt:channelId") { xml.text self.ucid }
+ xml.element("title") { xml.text self.title }
+ xml.element("link", rel: "alternate", href: "#{HOST_URL}/watch?#{query_params}")
+
+ xml.element("author") do
+ if auto_generated
+ xml.element("name") { xml.text self.author }
+ xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
+ else
+ xml.element("name") { xml.text author }
+ xml.element("uri") { xml.text "#{HOST_URL}/channel/#{ucid}" }
+ end
+ end
+
+ xml.element("content", type: "xhtml") do
+ xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
+ xml.element("a", href: "#{HOST_URL}/watch?#{query_params}") do
+ xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg")
+ end
+
+ xml.element("p", style: "word-break:break-word;white-space:pre-wrap") { xml.text html_to_content(self.description_html) }
+ end
+ end
+
+ xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
+
+ xml.element("media:group") do
+ xml.element("media:title") { xml.text self.title }
+ xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg",
+ width: "320", height: "180")
+ xml.element("media:description") { xml.text html_to_content(self.description_html) }
+ end
+
+ xml.element("media:community") do
+ xml.element("media:statistics", views: self.views)
+ end
+ end
+ end
+
+ def to_xml(auto_generated, query_params, _xml : Nil)
+ XML.build do |xml|
+ to_xml(auto_generated, query_params, xml)
+ end
+ end
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "video"
+ json.field "title", self.title
+ json.field "videoId", self.id
+
+ json.field "author", self.author
+ json.field "authorId", self.ucid
+ json.field "authorUrl", "/channel/#{self.ucid}"
+ json.field "authorVerified", self.author_verified
+
+ json.field "videoThumbnails" do
+ Invidious::JSONify::APIv1.thumbnails(json, self.id)
+ end
+
+ json.field "description", html_to_content(self.description_html)
+ json.field "descriptionHtml", self.description_html
+
+ json.field "viewCount", self.views
+ json.field "viewCountText", translate_count(locale, "generic_views_count", self.views, NumberFormatting::Short)
+ json.field "published", self.published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
+ json.field "lengthSeconds", self.length_seconds
+ json.field "liveNow", self.badges.live_now?
+ json.field "premium", self.badges.premium?
+ json.field "isUpcoming", self.upcoming?
+
+ if self.premiere_timestamp
+ json.field "premiereTimestamp", self.premiere_timestamp.try &.to_unix
+ end
+ json.field "isNew", self.badges.new?
+ json.field "is4k", self.badges.four_k?
+ json.field "is8k", self.badges.eight_k?
+ json.field "isVr180", self.badges.vr180?
+ json.field "isVr360", self.badges.vr360?
+ json.field "is3d", self.badges.three_d?
+ json.field "hasCaptions", self.badges.closed_captions?
+ end
+ end
+
+ # TODO: remove the locale and follow the crystal convention
+ def to_json(locale : String?, _json : Nil)
+ JSON.build do |json|
+ to_json(locale, json)
+ end
+ end
+
+ def to_json(json : JSON::Builder)
+ to_json(nil, json)
+ end
+
+ def upcoming?
+ premiere_timestamp ? true : false
+ end
+end
+
+struct SearchPlaylistVideo
+ include DB::Serializable
+
+ property title : String
+ property id : String
+ property length_seconds : Int32
+end
+
+struct SearchPlaylist
+ include DB::Serializable
+
+ property title : String
+ property id : String
+ property author : String
+ property ucid : String
+ property video_count : Int32
+ property videos : Array(SearchPlaylistVideo)
+ property thumbnail : String?
+ property author_verified : Bool
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "playlist"
+ json.field "title", self.title
+ json.field "playlistId", self.id
+ json.field "playlistThumbnail", self.thumbnail
+
+ json.field "author", self.author
+ json.field "authorId", self.ucid
+ json.field "authorUrl", "/channel/#{self.ucid}"
+
+ json.field "authorVerified", self.author_verified
+
+ json.field "videoCount", self.video_count
+ json.field "videos" do
+ json.array do
+ self.videos.each do |video|
+ json.object do
+ json.field "title", video.title
+ json.field "videoId", video.id
+ json.field "lengthSeconds", video.length_seconds
+
+ json.field "videoThumbnails" do
+ Invidious::JSONify::APIv1.thumbnails(json, video.id)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ # TODO: remove the locale and follow the crystal convention
+ def to_json(locale : String?, _json : Nil)
+ JSON.build do |json|
+ to_json(locale, json)
+ end
+ end
+
+ def to_json(json : JSON::Builder)
+ to_json(nil, json)
+ end
+end
+
+struct SearchChannel
+ include DB::Serializable
+
+ property author : String
+ property ucid : String
+ property author_thumbnail : String
+ property subscriber_count : Int32
+ property video_count : Int32
+ property channel_handle : String?
+ property description_html : String
+ property auto_generated : Bool
+ property author_verified : Bool
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "channel"
+ json.field "author", self.author
+ json.field "authorId", self.ucid
+ json.field "authorUrl", "/channel/#{self.ucid}"
+ json.field "authorVerified", self.author_verified
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", self.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+
+ json.field "autoGenerated", self.auto_generated
+ json.field "subCount", self.subscriber_count
+ json.field "videoCount", self.video_count
+ json.field "channelHandle", self.channel_handle
+
+ json.field "description", html_to_content(self.description_html)
+ json.field "descriptionHtml", self.description_html
+ end
+ end
+
+ # TODO: remove the locale and follow the crystal convention
+ def to_json(locale : String?, _json : Nil)
+ JSON.build do |json|
+ to_json(locale, json)
+ end
+ end
+
+ def to_json(json : JSON::Builder)
+ to_json(nil, json)
+ end
+end
+
+struct SearchHashtag
+ include DB::Serializable
+
+ property title : String
+ property url : String
+ property video_count : Int64
+ property channel_count : Int64
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "hashtag"
+ json.field "title", self.title
+ json.field "url", self.url
+ json.field "videoCount", self.video_count
+ json.field "channelCount", self.channel_count
+ end
+ end
+end
+
+class Category
+ include DB::Serializable
+
+ property title : String
+ property contents : Array(SearchItem) | Array(Video)
+ property url : String?
+ property description_html : String
+ property badges : Array(Tuple(String, String))?
+
+ def to_json(locale : String?, json : JSON::Builder)
+ json.object do
+ json.field "type", "category"
+ json.field "title", self.title
+ json.field "contents" do
+ json.array do
+ self.contents.each do |item|
+ item.to_json(locale, json)
+ end
+ end
+ end
+ end
+ end
+
+ # TODO: remove the locale and follow the crystal convention
+ def to_json(locale : String?, _json : Nil)
+ JSON.build do |json|
+ to_json(locale, json)
+ end
+ end
+
+ def to_json(json : JSON::Builder)
+ to_json(nil, json)
+ end
+end
+
+struct Continuation
+ getter token
+
+ def initialize(@token : String)
+ end
+end
+
+alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist | SearchHashtag | Category
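A minimal sketch of how these serialization types are consumed, assuming the structs above are in scope (all values are illustrative, not taken from this patch):

badges = VideoBadges::LiveNow | VideoBadges::FourK
badges.live_now?        # => true
badges.closed_captions? # => false

# Any SearchItem can render either into an existing JSON::Builder
# (item.to_json(locale, json)) or into a standalone string via the
# JSON.build overloads above, e.g. video.to_json(nil, nil).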
diff --git a/src/invidious/helpers/sig_helper.cr b/src/invidious/helpers/sig_helper.cr
new file mode 100644
index 00000000..6d198a42
--- /dev/null
+++ b/src/invidious/helpers/sig_helper.cr
@@ -0,0 +1,349 @@
+require "uri"
+require "socket"
+require "socket/tcp_socket"
+require "socket/unix_socket"
+
+{% if flag?(:advanced_debug) %}
+ require "io/hexdump"
+{% end %}
+
+private alias NetworkEndian = IO::ByteFormat::NetworkEndian
+
+module Invidious::SigHelper
+ enum UpdateStatus
+ Updated
+ UpdateNotRequired
+ Error
+ end
+
+ # -------------------
+ # Payload types
+ # -------------------
+
+ abstract struct Payload
+ end
+
+ struct StringPayload < Payload
+ getter string : String
+
+ def initialize(str : String)
+ raise Exception.new("SigHelper: String can't be empty") if str.empty?
+ @string = str
+ end
+
+ def self.from_bytes(slice : Bytes)
+ size = IO::ByteFormat::NetworkEndian.decode(UInt16, slice)
+ if size == 0 # Error code
+ raise Exception.new("SigHelper: Server encountered an error")
+ end
+
+ if (slice.bytesize - 2) != size
+ raise Exception.new("SigHelper: String size mismatch")
+ end
+
+ if str = String.new(slice[2..])
+ return self.new(str)
+ else
+ raise Exception.new("SigHelper: Can't read string from socket")
+ end
+ end
+
+ def to_io(io)
+ # `.to_u16` raises if there is an overflow during the conversion
+ io.write_bytes(@string.bytesize.to_u16, NetworkEndian)
+ io.write(@string.to_slice)
+ end
+ end
+
+ private enum Opcode
+ FORCE_UPDATE = 0
+ DECRYPT_N_SIGNATURE = 1
+ DECRYPT_SIGNATURE = 2
+ GET_SIGNATURE_TIMESTAMP = 3
+ GET_PLAYER_STATUS = 4
+ PLAYER_UPDATE_TIMESTAMP = 5
+ end
+
+ private record Request,
+ opcode : Opcode,
+ payload : Payload?
+
+ # ----------------------
+ # High-level functions
+ # ----------------------
+
+ class Client
+ @mux : Multiplexor
+
+ def initialize(uri_or_path)
+ @mux = Multiplexor.new(uri_or_path)
+ end
+
+ # Forces the server to re-fetch the YouTube player, and extract the necessary
+ # components from it (nsig function code, sig function code, signature timestamp).
+ def force_update : UpdateStatus
+ request = Request.new(Opcode::FORCE_UPDATE, nil)
+
+ value = send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt16, bytes)
+ end
+
+ case value
+ when 0x0000 then return UpdateStatus::Error
+ when 0xFFFF then return UpdateStatus::UpdateNotRequired
+ when 0xF44F then return UpdateStatus::Updated
+ else
+ code = value.nil? ? "nil" : value.to_s(base: 16)
+ raise Exception.new("SigHelper: Invalid status code received #{code}")
+ end
+ end
+
+ # Decrypt a provided n signature using the server's current nsig function
+ # code, and return the result (or nil on error).
+ def decrypt_n_param(n : String) : String?
+ request = Request.new(Opcode::DECRYPT_N_SIGNATURE, StringPayload.new(n))
+
+ n_dec = self.send_request(request) do |bytes|
+ StringPayload.from_bytes(bytes).string
+ end
+
+ return n_dec
+ end
+
+ # Decrypt a provided s signature using the server's current sig function
+ # code, and return the result (or nil on error).
+ def decrypt_sig(sig : String) : String?
+ request = Request.new(Opcode::DECRYPT_SIGNATURE, StringPayload.new(sig))
+
+ sig_dec = self.send_request(request) do |bytes|
+ StringPayload.from_bytes(bytes).string
+ end
+
+ return sig_dec
+ end
+
+ # Return the signature timestamp from the server's current player
+ def get_signature_timestamp : UInt64?
+ request = Request.new(Opcode::GET_SIGNATURE_TIMESTAMP, nil)
+
+ return self.send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt64, bytes)
+ end
+ end
+
+ # Return the current player's version
+ def get_player : UInt32?
+ request = Request.new(Opcode::GET_PLAYER_STATUS, nil)
+
+ return self.send_request(request) do |bytes|
+ has_player = (bytes[0] == 0xFF)
+ player_version = IO::ByteFormat::NetworkEndian.decode(UInt32, bytes[1..4])
+ has_player ? player_version : nil
+ end
+ end
+
+ # Return when the player was last updated
+ def get_player_timestamp : UInt64?
+ request = Request.new(Opcode::PLAYER_UPDATE_TIMESTAMP, nil)
+
+ return self.send_request(request) do |bytes|
+ IO::ByteFormat::NetworkEndian.decode(UInt64, bytes)
+ end
+ end
+
+ private def send_request(request : Request, &)
+ channel = @mux.send(request)
+ slice = channel.receive
+ return yield slice
+ rescue ex
+ LOGGER.debug("SigHelper: Error when sending a request")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
+ end
+
+ # ---------------------
+ # Low level functions
+ # ---------------------
+
+ class Multiplexor
+ alias TransactionID = UInt32
+ record Transaction, channel = ::Channel(Bytes).new
+
+ @prng = Random.new
+ @mutex = Mutex.new
+ @queue = {} of TransactionID => Transaction
+
+ @conn : Connection
+ @uri_or_path : String
+
+ def initialize(@uri_or_path)
+ @conn = Connection.new(uri_or_path)
+ listen
+ end
+
+ def listen : Nil
+ raise "Socket is closed" if @conn.closed?
+
+ LOGGER.debug("SigHelper: Multiplexor listening")
+
+ spawn do
+ loop do
+ begin
+ receive_data
+ rescue ex
+ LOGGER.info("SigHelper: Connection to helper died with '#{ex.message}' trying to reconnect...")
+ # We close the socket because for some reason is not closed.
+ @conn.close
+ loop do
+ begin
+ @conn = Connection.new(@uri_or_path)
+ LOGGER.info("SigHelper: Reconnected to SigHelper!")
+ rescue ex
+ LOGGER.debug("SigHelper: Reconnection to helper unsuccessful with error '#{ex.message}'. Retrying")
+ sleep 500.milliseconds
+ next
+ end
+ break if !@conn.closed?
+ end
+ end
+ Fiber.yield
+ end
+ end
+ end
+
+ def send(request : Request)
+ transaction = Transaction.new
+ transaction_id = @prng.rand(TransactionID)
+
+ # Add transaction to queue
+ @mutex.synchronize do
+ # With a 32-bit random integer, this should never happen. But just in case...
+ if @queue[transaction_id]?
+ raise Exception.new("SigHelper: Duplicate transaction ID! You got a shiny pokemon!")
+ end
+
+ @queue[transaction_id] = transaction
+ end
+
+ write_packet(transaction_id, request)
+
+ return transaction.channel
+ end
+
+ def receive_data
+ transaction_id, slice = read_packet
+
+ @mutex.synchronize do
+ if transaction = @queue.delete(transaction_id)
+ # Remove transaction from queue and send data to the channel
+ transaction.channel.send(slice)
+ LOGGER.trace("SigHelper: Transaction unqueued and data sent to channel")
+ else
+ raise Exception.new("SigHelper: Received transaction was not in queue")
+ end
+ end
+ end
+
+ # Read a single packet from the socket
+ private def read_packet : {TransactionID, Bytes}
+ # Header
+ transaction_id = @conn.read_bytes(UInt32, NetworkEndian)
+ length = @conn.read_bytes(UInt32, NetworkEndian)
+
+ LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} / length #{length}")
+
+ if length > 67_000
+ raise Exception.new("SigHelper: Packet longer than expected (#{length})")
+ end
+
+ # Payload
+ slice = Bytes.new(length)
+ @conn.read(slice) if length > 0
+
+ LOGGER.trace("SigHelper: payload = #{slice}")
+ LOGGER.trace("SigHelper: Recv transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+
+ return transaction_id, slice
+ end
+
+ # Write a single packet to the socket
+ private def write_packet(transaction_id : TransactionID, request : Request)
+ LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} / opcode #{request.opcode}")
+
+ io = IO::Memory.new(1024)
+ io.write_bytes(request.opcode.to_u8, NetworkEndian)
+ io.write_bytes(transaction_id, NetworkEndian)
+
+ if payload = request.payload
+ payload.to_io(io)
+ end
+
+ @conn.send(io)
+ @conn.flush
+
+ LOGGER.trace("SigHelper: Send transaction 0x#{transaction_id.to_s(base: 16)} - Done")
+ end
+ end
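# For reference, the wire format implemented by write_packet/read_packet above,
# as inferred from this file alone (the helper's own documentation is authoritative):
#
#   request:  opcode (UInt8) | transaction_id (UInt32, network endian) | optional payload
#             a StringPayload is framed as: length (UInt16, network endian) | raw bytes
#   response: transaction_id (UInt32) | length (UInt32) | payload (length bytes)
#
# e.g. DECRYPT_N_SIGNATURE for n = "abc" is serialized as:
#   0x01 | <4-byte transaction id> | 0x00 0x03 | 'a' 'b' 'c'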
+
+ class Connection
+ @socket : UNIXSocket | TCPSocket
+
+ {% if flag?(:advanced_debug) %}
+ @io : IO::Hexdump
+ {% end %}
+
+ def initialize(host_or_path : String)
+ case host_or_path
+ when .starts_with?('/')
+ # Make sure that the file exists
+ if File.exists?(host_or_path)
+ @socket = UNIXSocket.new(host_or_path)
+ else
+ raise Exception.new("SigHelper: '#{host_or_path}' no such file")
+ end
+ when .starts_with?("tcp://")
+ uri = URI.parse(host_or_path)
+ @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
+ else
+ uri = URI.parse("tcp://#{host_or_path}")
+ @socket = TCPSocket.new(uri.host.not_nil!, uri.port.not_nil!)
+ end
+ LOGGER.info("SigHelper: Using helper at '#{host_or_path}'")
+
+ {% if flag?(:advanced_debug) %}
+ @io = IO::Hexdump.new(@socket, output: STDERR, read: true, write: true)
+ {% end %}
+
+ @socket.sync = false
+ @socket.blocking = false
+ end
+
+ def closed? : Bool
+ return @socket.closed?
+ end
+
+ def close : Nil
+ @socket.close if !@socket.closed?
+ end
+
+ def flush(*args, **options)
+ @socket.flush(*args, **options)
+ end
+
+ def send(*args, **options)
+ @socket.send(*args, **options)
+ end
+
+ # Wrap IO functions, with added debug tooling if needed
+ {% for function in %w(read read_bytes write write_bytes) %}
+ def {{function.id}}(*args, **options)
+ {% if flag?(:advanced_debug) %}
+ @io.{{function.id}}(*args, **options)
+ {% else %}
+ @socket.{{function.id}}(*args, **options)
+ {% end %}
+ end
+ {% end %}
+ end
+end
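A usage sketch for the client above; the socket path and sample inputs are hypothetical, and the decrypt/timestamp helpers return nil when the request fails:

client = Invidious::SigHelper::Client.new("/tmp/inv_sig_helper.sock") # or "127.0.0.1:12999"

client.force_update                  # => UpdateStatus::Updated, UpdateNotRequired or Error
client.get_signature_timestamp       # => signature timestamp as UInt64, or nil
client.decrypt_n_param("AaBbCcDd")   # => transformed "n" throttling parameter, or nil
client.decrypt_sig("AOq0QJ8wRQI")    # => decrypted stream signature, or nil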
diff --git a/src/invidious/helpers/signatures.cr b/src/invidious/helpers/signatures.cr
index d8b1de65..82a28fc0 100644
--- a/src/invidious/helpers/signatures.cr
+++ b/src/invidious/helpers/signatures.cr
@@ -1,73 +1,53 @@
-alias SigProc = Proc(Array(String), Int32, Array(String))
+require "http/params"
+require "./sig_helper"
-struct DecryptFunction
- @decrypt_function = [] of {SigProc, Int32}
- @decrypt_time = Time.monotonic
+class Invidious::DecryptFunction
+ @last_update : Time = Time.utc - 42.days
- def initialize(@use_polling = true)
+ def initialize(uri_or_path)
+ @client = SigHelper::Client.new(uri_or_path)
+ self.check_update
end
- def update_decrypt_function
- @decrypt_function = fetch_decrypt_function
- end
-
- private def fetch_decrypt_function(id = "CvFH_6DNRCY")
- document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
- url = document.match(/src="(?<url>\/s\/player\/[^\/]+\/player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
- player = YT_POOL.client &.get(url).body
-
- function_name = player.match(/^(?<name>[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]
- function_body = player.match(/^#{Regex.escape(function_name)}=function\(\w\){(?<body>[^}]+)}/m).not_nil!["body"]
- function_body = function_body.split(";")[1..-2]
-
- var_name = function_body[0][0, 2]
- var_body = player.delete("\n").match(/var #{Regex.escape(var_name)}={(?<body>(.*?))};/).not_nil!["body"]
-
- operations = {} of String => SigProc
- var_body.split("},").each do |operation|
- op_name = operation.match(/^[^:]+/).not_nil![0]
- op_body = operation.match(/\{[^}]+/).not_nil![0]
-
- case op_body
- when "{a.reverse()"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.reverse }
- when "{a.splice(0,b)"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.delete_at(0..(b - 1)); a }
- else
- operations[op_name] = ->(a : Array(String), b : Int32) { c = a[0]; a[0] = a[b % a.size]; a[b % a.size] = c; a }
- end
- end
+ def check_update
+ # If we have updated in the last 5 minutes, do nothing
+ return if (Time.utc - @last_update) < 5.minutes
- decrypt_function = [] of {SigProc, Int32}
- function_body.each do |function|
- function = function.lchop(var_name).delete("[].")
+ # Get the amount of time elapsed since the player was last updated, in case
+ # multiple invidious processes are run in parallel.
+ update_time_elapsed = (@client.get_player_timestamp || 301).seconds
- op_name = function.match(/[^\(]+/).not_nil![0]
- value = function.match(/\(\w,(?<value>[\d]+)\)/).not_nil!["value"].to_i
-
- decrypt_function << {operations[op_name], value}
+ if update_time_elapsed > 5.minutes
+ LOGGER.debug("Signature: Player might be outdated, updating")
+ @client.force_update
+ @last_update = Time.utc
end
-
- return decrypt_function
end
- def decrypt_signature(fmt : Hash(String, JSON::Any))
- return "" if !fmt["s"]? || !fmt["sp"]?
-
- sp = fmt["sp"].as_s
- sig = fmt["s"].as_s.split("")
- if !@use_polling
- now = Time.monotonic
- if now - @decrypt_time > 60.seconds || @decrypt_function.size == 0
- @decrypt_function = fetch_decrypt_function
- @decrypt_time = Time.monotonic
- end
- end
+ def decrypt_nsig(n : String) : String?
+ self.check_update
+ return @client.decrypt_n_param(n)
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
- @decrypt_function.each do |proc, value|
- sig = proc.call(sig, value)
- end
+ def decrypt_signature(str : String) : String?
+ self.check_update
+ return @client.decrypt_sig(str)
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
+ end
- return "&#{sp}=#{sig.join("")}"
+ def get_sts : UInt64?
+ self.check_update
+ return @client.get_signature_timestamp
+ rescue ex
+ LOGGER.debug(ex.message || "Signature: Unknown error")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return nil
end
end
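With the extraction logic moved to the external helper, the caller side reduces to something like this (the constructor argument is hypothetical):

df = Invidious::DecryptFunction.new("/tmp/inv_sig_helper.sock")

df.get_sts                  # signature timestamp ("sts") for player requests, or nil
df.decrypt_nsig("nsig_in")  # decrypted "n" throttling parameter, or nil
df.decrypt_signature("sig") # decrypted stream signature, or nil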
diff --git a/src/invidious/helpers/tokens.cr b/src/invidious/helpers/tokens.cr
index a09ce90b..a44988cd 100644
--- a/src/invidious/helpers/tokens.cr
+++ b/src/invidious/helpers/tokens.cr
@@ -1,8 +1,8 @@
require "crypto/subtle"
-def generate_token(email, scopes, expire, key, db)
+def generate_token(email, scopes, expire, key)
session = "v1:#{Base64.urlsafe_encode(Random::Secure.random_bytes(32))}"
- PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", session, email, Time.utc)
+ Invidious::Database::SessionIDs.insert(session, email)
token = {
"session" => session,
@@ -19,7 +19,7 @@ def generate_token(email, scopes, expire, key, db)
return token.to_json
end
-def generate_response(session, scopes, key, db, expire = 6.hours, use_nonce = false)
+def generate_response(session, scopes, key, expire = 6.hours, use_nonce = false)
expire = Time.utc + expire
token = {
@@ -30,7 +30,7 @@ def generate_response(session, scopes, key, db, expire = 6.hours, use_nonce = fa
if use_nonce
nonce = Random::Secure.hex(16)
- db.exec("INSERT INTO nonces VALUES ($1, $2) ON CONFLICT DO NOTHING", nonce, expire)
+ Invidious::Database::Nonces.insert(nonce, expire)
token["nonce"] = nonce
end
@@ -42,11 +42,14 @@ end
def sign_token(key, hash)
string_to_sign = [] of String
+ # TODO: figure out which "key" variable is used here.
+ # Ameba reports a "Lint/ShadowingOuterLocalVar" warning on this
+ # variable, but it's preferable not to touch it (it works fine at the moment).
hash.each do |key, value|
next if key == "signature"
if value.is_a?(JSON::Any) && value.as_a?
- value = value.as_a.map { |i| i.as_s }
+ value = value.as_a.map(&.as_s)
end
case value
@@ -63,7 +66,7 @@ def sign_token(key, hash)
return Base64.urlsafe_encode(OpenSSL::HMAC.digest(:sha256, key, string_to_sign)).strip
end
-def validate_request(token, session, request, key, db, locale = nil)
+def validate_request(token, session, request, key, locale = nil)
case token
when String
token = JSON.parse(URI.decode_www_form(token)).as_h
@@ -82,7 +85,7 @@ def validate_request(token, session, request, key, db, locale = nil)
raise InfoException.new("Erroneous token")
end
- scopes = token["scopes"].as_a.map { |v| v.as_s }
+ scopes = token["scopes"].as_a.map(&.as_s)
scope = "#{request.method}:#{request.path.lchop("/api/v1/auth/").lstrip("/")}"
if !scopes_include_scope(scopes, scope)
raise InfoException.new("Invalid scope")
@@ -92,9 +95,9 @@ def validate_request(token, session, request, key, db, locale = nil)
raise InfoException.new("Invalid signature")
end
- if token["nonce"]? && (nonce = db.query_one?("SELECT * FROM nonces WHERE nonce = $1", token["nonce"], as: {String, Time}))
+ if token["nonce"]? && (nonce = Invidious::Database::Nonces.select(token["nonce"].as_s))
if nonce[1] > Time.utc
- db.exec("UPDATE nonces SET expire = $1 WHERE nonce = $2", Time.utc(1990, 1, 1), nonce[0])
+ Invidious::Database::Nonces.update_set_expired(nonce[0])
else
raise InfoException.new("Erroneous token")
end
@@ -105,11 +108,11 @@ end
def scope_includes_scope(scope, subset)
methods, endpoint = scope.split(":")
- methods = methods.split(";").map { |method| method.upcase }.reject { |method| method.empty? }.sort
+ methods = methods.split(";").map(&.upcase).reject(&.empty?).sort!
endpoint = endpoint.downcase
subset_methods, subset_endpoint = subset.split(":")
- subset_methods = subset_methods.split(";").map { |method| method.upcase }.sort
+ subset_methods = subset_methods.split(";").map(&.upcase).sort!
subset_endpoint = subset_endpoint.downcase
if methods.empty?
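The Invidious::Database::Nonces calls introduced above replace the inline SQL but keep the same round-trip; roughly (arguments are illustrative):

nonce = Random::Secure.hex(16)
Invidious::Database::Nonces.insert(nonce, Time.utc + 6.hours)

if row = Invidious::Database::Nonces.select(nonce) # => {nonce, expire} tuple, or nil
  # mark it as spent only while it is still valid, as validate_request does above
  Invidious::Database::Nonces.update_set_expired(row[0]) if row[1] > Time.utc
end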
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index 10d4e6b6..4d9bb28d 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -1,70 +1,3 @@
-require "lsquic"
-require "pool/connection"
-
-def add_yt_headers(request)
- request.headers["user-agent"] ||= "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36"
- request.headers["accept-charset"] ||= "ISO-8859-1,utf-8;q=0.7,*;q=0.7"
- request.headers["accept"] ||= "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
- request.headers["accept-language"] ||= "en-us,en;q=0.5"
- return if request.resource.starts_with? "/sorry/index"
- request.headers["x-youtube-client-name"] ||= "1"
- request.headers["x-youtube-client-version"] ||= "2.20200609"
- # Preserve original cookies and add new YT consent cookie for EU servers
- request.headers["cookie"] = "#{request.headers["cookie"]?}; CONSENT=YES+"
- if !CONFIG.cookies.empty?
- request.headers["cookie"] = "#{(CONFIG.cookies.map { |c| "#{c.name}=#{c.value}" }).join("; ")}; #{request.headers["cookie"]?}"
- end
-end
-
-struct YoutubeConnectionPool
- property! url : URI
- property! capacity : Int32
- property! timeout : Float64
- property pool : ConnectionPool(QUIC::Client | HTTP::Client)
-
- def initialize(url : URI, @capacity = 5, @timeout = 5.0, use_quic = true)
- @url = url
- @pool = build_pool(use_quic)
- end
-
- def client(region = nil, &block)
- if region
- conn = make_client(url, region)
- response = yield conn
- else
- conn = pool.checkout
- begin
- response = yield conn
- rescue ex
- conn.close
- conn = QUIC::Client.new(url)
- conn.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::INET
- conn.family = Socket::Family::INET if conn.family == Socket::Family::UNSPEC
- conn.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
- response = yield conn
- ensure
- pool.checkin(conn)
- end
- end
-
- response
- end
-
- private def build_pool(use_quic)
- ConnectionPool(QUIC::Client | HTTP::Client).new(capacity: capacity, timeout: timeout) do
- if use_quic
- conn = QUIC::Client.new(url)
- else
- conn = HTTP::Client.new(url)
- end
- conn.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::INET
- conn.family = Socket::Family::INET if conn.family == Socket::Family::UNSPEC
- conn.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
- conn
- end
- end
-end
-
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
def ci_lower_bound(pos, n)
if n == 0
@@ -85,42 +18,18 @@ def elapsed_text(elapsed)
"#{(millis * 1000).round(2)}µs"
end
-def make_client(url : URI, region = nil)
- # TODO: Migrate any applicable endpoints to QUIC
- client = HTTPClient.new(url, OpenSSL::SSL::Context::Client.insecure)
- client.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::UNSPEC
- client.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
- client.read_timeout = 10.seconds
- client.connect_timeout = 10.seconds
-
- if region
- PROXY_LIST[region]?.try &.sample(40).each do |proxy|
- begin
- proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
- client.set_proxy(proxy)
- break
- rescue ex
- end
- end
- end
-
- return client
-end
-
-def make_client(url : URI, region = nil, &block)
- client = make_client(url, region)
- begin
- yield client
- ensure
- client.close
- end
-end
-
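+# Decodes a video length string such as "1:23:45" into a number of seconds.
+#
+# Illustrative examples:
+#
+#   decode_length_seconds("1:23:45") # => 5025
+#   decode_length_seconds("4:20")    # => 260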
def decode_length_seconds(string)
- length_seconds = string.gsub(/[^0-9:]/, "").split(":").map &.to_i
+ length_seconds = string.gsub(/[^0-9:]/, "")
+ return 0_i32 if length_seconds.empty?
+
+ length_seconds = length_seconds.split(":").map { |x| x.to_i? || 0 }
length_seconds = [0] * (3 - length_seconds.size) + length_seconds
- length_seconds = Time::Span.new hours: length_seconds[0], minutes: length_seconds[1], seconds: length_seconds[2]
- length_seconds = length_seconds.total_seconds.to_i
+
+ length_seconds = Time::Span.new(
+ hours: length_seconds[0],
+ minutes: length_seconds[1],
+ seconds: length_seconds[2]
+ ).total_seconds.to_i32
return length_seconds
end
@@ -142,6 +51,24 @@ def recode_length_seconds(time)
end
end
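+# Decodes a duration given either as a number of minutes ("90") or with
+# explicit units ("1h30m") into a `Time::Span`. Illustrative examples:
+#
+#   decode_interval("90")    # => 01:30:00
+#   decode_interval("1h30m") # => 01:30:00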
+def decode_interval(string : String) : Time::Span
+ raw_minutes = string.try &.to_i32?
+
+ if !raw_minutes
+ hours = /(?<hours>\d+)h/.match(string).try &.["hours"].try &.to_i32
+ hours ||= 0
+
+ minutes = /(?<minutes>\d+)m(?!s)/.match(string).try &.["minutes"].try &.to_i32
+ minutes ||= 0
+
+ time = Time::Span.new(hours: hours, minutes: minutes)
+ else
+ time = Time::Span.new(minutes: raw_minutes)
+ end
+
+ return time
+end
+
def decode_time(string)
time = string.try &.to_f?
@@ -184,24 +111,27 @@ def decode_date(string : String)
else nil # Continue
end
- # String matches format "20 hours ago", "4 months ago"...
- date = string.split(" ")[-3, 3]
- delta = date[0].to_i
+ # String matches format "20 hours ago", "4 months ago", "20s ago", "15min ago"...
+ match = string.match(/(?<count>\d+) ?(?<span>[smhdwy]\w*) ago/)
- case date[1]
- when .includes? "second"
+ raise "Could not parse #{string}" if match.nil?
+
+ delta = match["count"].to_i
+
+ case match["span"]
+ when .starts_with? "s" # second(s)
delta = delta.seconds
- when .includes? "minute"
+ when .starts_with? "mi" # minute(s)
delta = delta.minutes
- when .includes? "hour"
+ when .starts_with? "h" # hour(s)
delta = delta.hours
- when .includes? "day"
+ when .starts_with? "d" # day(s)
delta = delta.days
- when .includes? "week"
+ when .starts_with? "w" # week(s)
delta = delta.weeks
- when .includes? "month"
+ when .starts_with? "mo" # month(s)
delta = delta.months
- when .includes? "year"
+ when .starts_with? "y" # year(s)
delta = delta.years
else
raise "Could not parse #{string}"
@@ -214,51 +144,47 @@ def recode_date(time : Time, locale)
span = Time.utc - time
if span.total_days > 365.0
- span = translate(locale, "`x` years", (span.total_days.to_i // 365).to_s)
+ return translate_count(locale, "generic_count_years", span.total_days.to_i // 365)
elsif span.total_days > 30.0
- span = translate(locale, "`x` months", (span.total_days.to_i // 30).to_s)
+ return translate_count(locale, "generic_count_months", span.total_days.to_i // 30)
elsif span.total_days > 7.0
- span = translate(locale, "`x` weeks", (span.total_days.to_i // 7).to_s)
+ return translate_count(locale, "generic_count_weeks", span.total_days.to_i // 7)
elsif span.total_hours > 24.0
- span = translate(locale, "`x` days", (span.total_days.to_i).to_s)
+ return translate_count(locale, "generic_count_days", span.total_days.to_i)
elsif span.total_minutes > 60.0
- span = translate(locale, "`x` hours", (span.total_hours.to_i).to_s)
+ return translate_count(locale, "generic_count_hours", span.total_hours.to_i)
elsif span.total_seconds > 60.0
- span = translate(locale, "`x` minutes", (span.total_minutes.to_i).to_s)
+ return translate_count(locale, "generic_count_minutes", span.total_minutes.to_i)
else
- span = translate(locale, "`x` seconds", (span.total_seconds.to_i).to_s)
+ return translate_count(locale, "generic_count_seconds", span.total_seconds.to_i)
end
-
- return span
end
def number_with_separator(number)
number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
-def short_text_to_number(short_text : String) : Int32
- case short_text
- when .ends_with? "M"
- number = short_text.rstrip(" mM").to_f
- number *= 1000000
- when .ends_with? "K"
- number = short_text.rstrip(" kK").to_f
- number *= 1000
- else
- number = short_text.rstrip(" ")
- end
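+# Converts a short, human-readable count such as "1.5M" or "12K" into an `Int64`.
+#
+# Illustrative examples:
+#
+#   short_text_to_number("1.5M") # => 1500000
+#   short_text_to_number("123")  # => 123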
+def short_text_to_number(short_text : String) : Int64
+ matches = /(?<number>\d+(\.\d+)?)\s?(?<suffix>[mMkKbB]?)/.match(short_text)
+ number = matches.try &.["number"].to_f || 0.0
- number = number.to_i
+ case matches.try &.["suffix"].downcase
+ when "k" then number *= 1_000
+ when "m" then number *= 1_000_000
+ when "b" then number *= 1_000_000_000
+ end
- return number
+ return number.to_i64
+rescue ex
+ return 0_i64
end
def number_to_short_text(number)
- seperated = number_with_separator(number).gsub(",", ".").split("")
- text = seperated.first(2).join
+ separated = number_with_separator(number).gsub(",", ".").split("")
+ text = separated.first(2).join
- if seperated[2]? && seperated[2] != "."
- text += seperated[2]
+ if separated[2]? && separated[2] != "."
+ text += separated[2]
end
text = text.rchop(".0")
@@ -298,7 +224,7 @@ def make_host_url(kemal_config)
# Add if non-standard port
if port != 80 && port != 443
- port = ":#{kemal_config.port}"
+ port = ":#{port}"
else
port = ""
end
@@ -336,7 +262,7 @@ def get_referer(env, fallback = "/", unroll = true)
end
referer = referer.request_target
- referer = "/" + referer.gsub(/[^\/?@&%=\-_.0-9a-zA-Z]/, "").lstrip("/\\")
+ referer = "/" + referer.gsub(/[^\/?@&%=\-_.:,*0-9a-zA-Z]/, "").lstrip("/\\")
if referer == env.request.path
referer = fallback
@@ -385,8 +311,8 @@ def parse_range(range)
end
ranges = range.lchop("bytes=").split(',')
- ranges.each do |range|
- start_range, end_range = range.split('-')
+ ranges.each do |r|
+ start_range, end_range = r.split('-')
start_range = start_range.to_i64? || 0_i64
end_range = end_range.to_i64?
@@ -397,15 +323,63 @@ def parse_range(range)
return 0_i64, nil
end
-def convert_theme(theme)
- case theme
- when "true"
- "dark"
- when "false"
- "light"
- when "", nil
- nil
- else
- theme
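+# Strips the scheme from a URI and truncates the remainder to `max_length`
+# characters. Illustrative example:
+#
+#   reduce_uri("https://www.example.com/some/very/long/path", 20)
+#   # => "www.example.com/some…"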
+def reduce_uri(uri : URI | String, max_length : Int32 = 50, suffix : String = "…") : String
+ str = uri.to_s.sub(/^https?:\/\//, "")
+ if str.size > max_length
+ str = "#{str[0, max_length]}#{suffix}"
+ end
+ return str
+end
+
+# Get the HTML link from a NavigationEndpoint or an innertubeCommand
+def parse_link_endpoint(endpoint : JSON::Any, text : String, video_id : String)
+ if url = endpoint.dig?("urlEndpoint", "url").try &.as_s
+ url = URI.parse(url)
+ displayed_url = text
+
+ if url.host == "youtu.be"
+ url = "/watch?v=#{url.request_target.lstrip('/')}"
+ elsif url.host.nil? || url.host.not_nil!.ends_with?("youtube.com")
+ if url.path == "/redirect"
+ # Sometimes, links can be corrupted (why?) so make sure to fall back
+ # nicely. See https://github.com/iv-org/invidious/issues/2682
+ url = url.query_params["q"]? || ""
+ displayed_url = url
+ else
+ url = url.request_target
+ displayed_url = "youtube.com#{url}"
+ end
+ end
+
+ text = %(<a href="#{url}">#{reduce_uri(displayed_url)}</a>)
+ elsif watch_endpoint = endpoint.dig?("watchEndpoint")
+ start_time = watch_endpoint["startTimeSeconds"]?.try &.as_i
+ link_video_id = watch_endpoint["videoId"].as_s
+
+ url = "/watch?v=#{link_video_id}"
+ url += "&t=#{start_time}" if !start_time.nil?
+
+ # If the current video ID (passed through from the caller function)
+ # is the same as the video ID in the link, add HTML attributes for
+ # the JS handler function that bypasses page reload.
+ #
+ # See: https://github.com/iv-org/invidious/issues/3063
+ if link_video_id == video_id
+ start_time ||= 0
+ text = %(<a href="#{url}" data-onclick="jump_to_time" data-jump-time="#{start_time}">#{reduce_uri(text)}</a>)
+ else
+ text = %(<a href="#{url}">#{text}</a>)
+ end
+ elsif url = endpoint.dig?("commandMetadata", "webCommandMetadata", "url").try &.as_s
+ if text.starts_with?(/\s?[@#]/)
+ # Handle "pings" in comments and hasthags differently
+ # See:
+ # - https://github.com/iv-org/invidious/issues/3038
+ # - https://github.com/iv-org/invidious/issues/3062
+ text = %(<a href="#{url}">#{text}</a>)
+ else
+ text = %(<a href="#{url}">#{reduce_uri(text)}</a>)
+ end
end
+ return text
end
diff --git a/src/invidious/helpers/webvtt.cr b/src/invidious/helpers/webvtt.cr
new file mode 100644
index 00000000..260d250f
--- /dev/null
+++ b/src/invidious/helpers/webvtt.cr
@@ -0,0 +1,81 @@
+# Namespace for logic relating to generating WebVTT files
+#
+# Probably not fully compliant with the WebVTT spec, but it is enough for Invidious.
+module WebVTT
+ # A WebVTT builder generates WebVTT files
+ private class Builder
+ # See https://developer.mozilla.org/en-US/docs/Web/API/WebVTT_API#cue_payload
+ private ESCAPE_SUBSTITUTIONS = {
+ '&' => "&amp;",
+ '<' => "&lt;",
+ '>' => "&gt;",
+ '\u200E' => "&lrm;",
+ '\u200F' => "&rlm;",
+ '\u00A0' => "&nbsp;",
+ }
+
+ def initialize(@io : IO)
+ end
+
+ # Writes a WebVTT cue with the specified timestamps and contents
+ def cue(start_time : Time::Span, end_time : Time::Span, text : String)
+ timestamp(start_time, end_time)
+ @io << self.escape(text)
+ @io << "\n\n"
+ end
+
+ private def timestamp(start_time : Time::Span, end_time : Time::Span)
+ timestamp_component(start_time)
+ @io << " --> "
+ timestamp_component(end_time)
+
+ @io << '\n'
+ end
+
+ private def timestamp_component(timestamp : Time::Span)
+ @io << timestamp.hours.to_s.rjust(2, '0')
+ @io << ':' << timestamp.minutes.to_s.rjust(2, '0')
+ @io << ':' << timestamp.seconds.to_s.rjust(2, '0')
+ @io << '.' << timestamp.milliseconds.to_s.rjust(3, '0')
+ end
+
+ private def escape(text : String) : String
+ return text.gsub(ESCAPE_SUBSTITUTIONS)
+ end
+
+ def document(setting_fields : Hash(String, String)? = nil, &)
+ @io << "WEBVTT\n"
+
+ if setting_fields
+ setting_fields.each do |name, value|
+ @io << name << ": " << value << '\n'
+ end
+ end
+
+ @io << '\n'
+
+ yield
+ end
+ end
+
+ # Returns the resulting `String` of writing WebVTT to the yielded `WebVTT::Builder`
+ #
+ # ```
+ # string = WebVTT.build do |vtt|
+ # vtt.cue(Time::Span.new(seconds: 1), Time::Span.new(seconds: 2), "Line 1")
+ # vtt.cue(Time::Span.new(seconds: 2), Time::Span.new(seconds: 3), "Line 2")
+ # end
+ #
+ # string # => "WEBVTT\n\n00:00:01.000 --> 00:00:02.000\nLine 1\n\n00:00:02.000 --> 00:00:03.000\nLine 2\n\n"
+ # ```
+ #
+ # Accepts an optional hash of setting fields to add settings attributes to the resulting VTT file.
+ def self.build(setting_fields : Hash(String, String)? = nil, &)
+ String.build do |str|
+ builder = Builder.new(str)
+ builder.document(setting_fields) do
+ yield builder
+ end
+ end
+ end
+end
diff --git a/src/invidious/helpers/youtube_api.cr b/src/invidious/helpers/youtube_api.cr
deleted file mode 100644
index 30413532..00000000
--- a/src/invidious/helpers/youtube_api.cr
+++ /dev/null
@@ -1,31 +0,0 @@
-#
-# This file contains youtube API wrappers
-#
-
-# Hard-coded constants required by the API
-HARDCODED_API_KEY = "AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8"
-HARDCODED_CLIENT_VERS = "2.20210318.08.00"
-
-def request_youtube_api_browse(continuation)
- # JSON Request data, required by the API
- data = {
- "context": {
- "client": {
- "hl": "en",
- "gl": "US",
- "clientName": "WEB",
- "clientVersion": HARDCODED_CLIENT_VERS,
- },
- },
- "continuation": continuation,
- }
-
- # Send the POST request and return result
- response = YT_POOL.client &.post(
- "/youtubei/v1/browse?key=#{HARDCODED_API_KEY}",
- headers: HTTP::Headers{"content-type" => "application/json"},
- body: data.to_json
- )
-
- return response.body
-end
diff --git a/src/invidious/http_server/utils.cr b/src/invidious/http_server/utils.cr
new file mode 100644
index 00000000..623a9177
--- /dev/null
+++ b/src/invidious/http_server/utils.cr
@@ -0,0 +1,41 @@
+require "uri"
+
+module Invidious::HttpServer
+ module Utils
+ extend self
+
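+    # Builds the local, proxied path for a remote video URL by moving its host
+    # into a `host` query parameter. Illustrative example (hypothetical host):
+    #
+    #   proxy_video_url("https://rr1---sn-example.googlevideo.com/videoplayback?expire=123")
+    #   # => "/videoplayback?expire=123&host=rr1---sn-example.googlevideo.com"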
+ def proxy_video_url(raw_url : String, *, region : String? = nil, absolute : Bool = false)
+ url = URI.parse(raw_url)
+
+ # Add some URL parameters
+ params = url.query_params
+ params["host"] = url.host.not_nil! # Should never be nil, in theory
+ params["region"] = region if !region.nil?
+ url.query_params = params
+
+ if absolute
+ return "#{HOST_URL}#{url.request_target}"
+ else
+ return url.request_target
+ end
+ end
+
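+    # Appends `params` to the existing query string of `url`.
+    # Illustrative example (hypothetical video ID):
+    #
+    #   add_params_to_url("/watch?v=abcdefghijk", URI::Params.parse("t=42"))
+    #   # => URI whose query is "v=abcdefghijk&t=42"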
+ def add_params_to_url(url : String | URI, params : URI::Params) : URI
+ url = URI.parse(url) if url.is_a?(String)
+
+ url_query = url.query || ""
+
+ # Append the parameters
+ url.query = String.build do |str|
+ if !url_query.empty?
+ str << url_query
+ str << '&'
+ end
+
+ str << params
+ end
+
+ return url
+ end
+ end
+end
diff --git a/src/invidious/jobs.cr b/src/invidious/jobs.cr
index ec0cad64..b6b673f7 100644
--- a/src/invidious/jobs.cr
+++ b/src/invidious/jobs.cr
@@ -1,12 +1,39 @@
module Invidious::Jobs
JOBS = [] of BaseJob
+ # Automatically generate a structure that wraps the various
+ # jobs' configs, so that the following YAML config can be used:
+ #
+ # jobs:
+ # job_name:
+ # enable: true
+ # some_property: "value"
+ #
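+ # For example, `Invidious::Jobs::ClearExpiredItemsJob` is exposed here as a
+ # `clear_expired_items` getter holding its `Config`.
+ #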
+ macro finished
+ struct JobsConfig
+ include YAML::Serializable
+
+ {% for sc in BaseJob.subclasses %}
+ # Voodoo macro to transform `Some::Module::CustomJob` to `custom`
+ {% class_name = sc.id.split("::").last.id.gsub(/Job$/, "").underscore %}
+
+ getter {{ class_name }} = {{ sc.name }}::Config.new
+ {% end %}
+
+ def initialize
+ end
+ end
+ end
+
def self.register(job : BaseJob)
JOBS << job
end
def self.start_all
JOBS.each do |job|
+ # Don't run the main routine if the job is disabled by config
+ next if job.disabled?
+
spawn { job.begin }
end
end
diff --git a/src/invidious/jobs/base_job.cr b/src/invidious/jobs/base_job.cr
index 47e75864..f90f0bfe 100644
--- a/src/invidious/jobs/base_job.cr
+++ b/src/invidious/jobs/base_job.cr
@@ -1,3 +1,33 @@
abstract class Invidious::Jobs::BaseJob
abstract def begin
+
+ # When this base job class is inherited, make sure to define
+ # a basic "Config" structure, that contains the "enable" property,
+ # and to create the associated instance property.
+ #
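+ # With this in place, a job can be toggled from the YAML config, e.g.
+ # (illustrative):
+ #
+ #   jobs:
+ #     clear_expired_items:
+ #       enable: false
+ #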
+ macro inherited
+ macro finished
+ # This config structure can be expanded as required.
+ struct Config
+ include YAML::Serializable
+
+ property enable = true
+
+ def initialize
+ end
+ end
+
+ property cfg = Config.new
+
+ # Return true if job is enabled by config
+ protected def enabled? : Bool
+ return (@cfg.enable == true)
+ end
+
+ # Return true if job is disabled by config
+ protected def disabled? : Bool
+ return (@cfg.enable == false)
+ end
+ end
+ end
end
diff --git a/src/invidious/jobs/bypass_captcha_job.cr b/src/invidious/jobs/bypass_captcha_job.cr
deleted file mode 100644
index 4269e123..00000000
--- a/src/invidious/jobs/bypass_captcha_job.cr
+++ /dev/null
@@ -1,131 +0,0 @@
-class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
- def begin
- loop do
- begin
- {"/watch?v=jNQXAC9IVRw&gl=US&hl=en&has_verified=1&bpctr=9999999999", produce_channel_videos_url(ucid: "UC4QobU6STFB0P71PMvOGN5A")}.each do |path|
- response = YT_POOL.client &.get(path)
- if response.body.includes?("To continue with your YouTube experience, please fill out the form below.")
- html = XML.parse_html(response.body)
- form = html.xpath_node(%(//form[@action="/das_captcha"])).not_nil!
- site_key = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-sitekey"]
- s_value = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-s"]
-
- inputs = {} of String => String
- form.xpath_nodes(%(.//input[@name])).map do |node|
- inputs[node["name"]] = node["value"]
- end
-
- headers = response.cookies.add_request_headers(HTTP::Headers.new)
-
- response = JSON.parse(HTTP::Client.post(CONFIG.captcha_api_url + "/createTask",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "task" => {
- "type" => "NoCaptchaTaskProxyless",
- "websiteURL" => "https://www.youtube.com#{path}",
- "websiteKey" => site_key,
- "recaptchaDataSValue" => s_value,
- },
- }.to_json).body)
-
- raise response["error"].as_s if response["error"]?
- task_id = response["taskId"].as_i
-
- loop do
- sleep 10.seconds
-
- response = JSON.parse(HTTP::Client.post(CONFIG.captcha_api_url + "/getTaskResult",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "taskId" => task_id,
- }.to_json).body)
-
- if response["status"]?.try &.== "ready"
- break
- elsif response["errorId"]?.try &.as_i != 0
- raise response["errorDescription"].as_s
- end
- end
-
- inputs["g-recaptcha-response"] = response["solution"]["gRecaptchaResponse"].as_s
- headers["Cookies"] = response["solution"]["cookies"].as_h?.try &.map { |k, v| "#{k}=#{v}" }.join("; ") || ""
- response = YT_POOL.client &.post("/das_captcha", headers, form: inputs)
-
- response.cookies
- .select { |cookie| cookie.name != "PREF" }
- .each { |cookie| CONFIG.cookies << cookie }
-
- # Persist cookies between runs
- File.write("config/config.yml", CONFIG.to_yaml)
- elsif response.headers["Location"]?.try &.includes?("/sorry/index")
- location = response.headers["Location"].try { |u| URI.parse(u) }
- headers = HTTP::Headers{":authority" => location.host.not_nil!}
- response = YT_POOL.client &.get(location.request_target, headers)
-
- html = XML.parse_html(response.body)
- form = html.xpath_node(%(//form[@action="index"])).not_nil!
- site_key = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-sitekey"]
- s_value = form.xpath_node(%(.//div[@id="recaptcha"])).try &.["data-s"]
-
- inputs = {} of String => String
- form.xpath_nodes(%(.//input[@name])).map do |node|
- inputs[node["name"]] = node["value"]
- end
-
- captcha_client = HTTPClient.new(URI.parse(CONFIG.captcha_api_url))
- captcha_client.family = CONFIG.force_resolve || Socket::Family::INET
- response = JSON.parse(captcha_client.post("/createTask",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "task" => {
- "type" => "NoCaptchaTaskProxyless",
- "websiteURL" => location.to_s,
- "websiteKey" => site_key,
- "recaptchaDataSValue" => s_value,
- },
- }.to_json).body)
-
- captcha_client.close
-
- raise response["error"].as_s if response["error"]?
- task_id = response["taskId"].as_i
-
- loop do
- sleep 10.seconds
-
- response = JSON.parse(captcha_client.post("/getTaskResult",
- headers: HTTP::Headers{"Content-Type" => "application/json"}, body: {
- "clientKey" => CONFIG.captcha_key,
- "taskId" => task_id,
- }.to_json).body)
-
- if response["status"]?.try &.== "ready"
- break
- elsif response["errorId"]?.try &.as_i != 0
- raise response["errorDescription"].as_s
- end
- end
-
- inputs["g-recaptcha-response"] = response["solution"]["gRecaptchaResponse"].as_s
- headers["Cookies"] = response["solution"]["cookies"].as_h?.try &.map { |k, v| "#{k}=#{v}" }.join("; ") || ""
- response = YT_POOL.client &.post("/sorry/index", headers: headers, form: inputs)
- headers = HTTP::Headers{
- "Cookie" => URI.parse(response.headers["location"]).query_params["google_abuse"].split(";")[0],
- }
- cookies = HTTP::Cookies.from_headers(headers)
-
- cookies.each { |cookie| CONFIG.cookies << cookie }
-
- # Persist cookies between runs
- File.write("config/config.yml", CONFIG.to_yaml)
- end
- end
- rescue ex
- LOGGER.error("BypassCaptchaJob: #{ex.message}")
- ensure
- sleep 1.minute
- Fiber.yield
- end
- end
- end
-end
diff --git a/src/invidious/jobs/clear_expired_items_job.cr b/src/invidious/jobs/clear_expired_items_job.cr
new file mode 100644
index 00000000..17191aac
--- /dev/null
+++ b/src/invidious/jobs/clear_expired_items_job.cr
@@ -0,0 +1,27 @@
+class Invidious::Jobs::ClearExpiredItemsJob < Invidious::Jobs::BaseJob
+ # Every hour, remove items (videos, nonces, etc.) whose cache is outdated.
+ # This removes the need for a cron job.
+ def begin
+ loop do
+ failed = false
+
+ LOGGER.info("jobs: running ClearExpiredItems job")
+
+ begin
+ Invidious::Database::Videos.delete_expired
+ Invidious::Database::Nonces.delete_expired
+ rescue DB::Error
+ failed = true
+ end
+
+ # Retry earlier than scheduled on DB error
+ if failed
+ LOGGER.info("jobs: ClearExpiredItems failed. Retrying in 10 minutes.")
+ sleep 10.minutes
+ else
+ LOGGER.info("jobs: ClearExpiredItems done.")
+ sleep 1.hour
+ end
+ end
+ end
+end
diff --git a/src/invidious/jobs/instance_refresh_job.cr b/src/invidious/jobs/instance_refresh_job.cr
new file mode 100644
index 00000000..cb4280b9
--- /dev/null
+++ b/src/invidious/jobs/instance_refresh_job.cr
@@ -0,0 +1,97 @@
+class Invidious::Jobs::InstanceListRefreshJob < Invidious::Jobs::BaseJob
+ # We update the contents of a constant so that it can be accessed from anywhere
+ # within the codebase.
+ #
+ # "INSTANCES" => Array(Tuple(String, String)) # region, instance
+
+ INSTANCES = {"INSTANCES" => [] of Tuple(String, String)}
+
+ def initialize
+ end
+
+ def begin
+ loop do
+ refresh_instances
+ LOGGER.info("InstanceListRefreshJob: Done, sleeping for 30 minutes")
+ sleep 30.minute
+ Fiber.yield
+ end
+ end
+
+ # Refreshes the list of instances used for redirects.
+ #
+ # Does the following three checks for each instance:
+ # - Is it a clear-net instance?
+ # - Is it an instance with a good uptime?
+ # - Is it an updated instance?
+ private def refresh_instances
+ raw_instance_list = self.fetch_instances
+ filtered_instance_list = [] of Tuple(String, String)
+
+ raw_instance_list.each do |instance_data|
+ # TODO allow Tor hidden service instances when the current instance
+ # is also a hidden service. Same for i2p and any other non-clearnet instances.
+ begin
+ domain = instance_data[0]
+ info = instance_data[1]
+ stats = info["stats"]
+
+ next unless info["type"] == "https"
+ next if bad_uptime?(info["monitor"])
+ next if outdated?(stats["software"]["version"])
+
+ filtered_instance_list << {info["region"].as_s, domain.as_s}
+ rescue ex
+ if domain
+ LOGGER.info("InstanceListRefreshJob: failed to parse information from '#{domain}' because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ else
+ LOGGER.info("InstanceListRefreshJob: failed to parse information from an instance because \"#{ex}\"\n\"#{ex.backtrace.join('\n')}\" ")
+ end
+ end
+ end
+
+ if !filtered_instance_list.empty?
+ INSTANCES["INSTANCES"] = filtered_instance_list
+ end
+ end
+
+ # Fetches information regarding instances from api.invidious.io or an otherwise configured URL
+ private def fetch_instances : Array(JSON::Any)
+ begin
+ # We directly call the stdlib HTTP::Client here, as it allows us to bypass the
+ # force_resolve config option. This is needed because api.invidious.io does not support
+ # IPv6, so the request below would raise if force_resolve were set to the ipv6 value.
+ instance_api_client = HTTP::Client.new(URI.parse("https://api.invidious.io"))
+
+ # Timeouts
+ instance_api_client.connect_timeout = 10.seconds
+ instance_api_client.dns_timeout = 10.seconds
+
+ raw_instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
+ instance_api_client.close
+ rescue ex : Socket::ConnectError | IO::TimeoutError | JSON::ParseException
+ raw_instance_list = [] of JSON::Any
+ end
+
+ return raw_instance_list
+ end
+
+ # Checks if the given target instance is outdated
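+ # (i.e. the dates in the two version strings differ by more than 30 days).
+ #
+ # Illustrative example: a remote "2023.06.15" compared against a local
+ # "2023.04.01" build is treated as outdated.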
+ private def outdated?(target_instance_version) : Bool
+ remote_commit_date = target_instance_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
+ return false if !remote_commit_date
+
+ remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
+ local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)
+
+ return (remote_commit_date - local_commit_date).abs.days > 30
+ end
+
+ # Checks if the target instance has poor uptime: it is currently down, or its uptime over a 30 day period is below 90%
+ private def bad_uptime?(target_instance_health_monitor) : Bool
+ return true if !target_instance_health_monitor["down"].as_bool == false
+ return true if target_instance_health_monitor["uptime"].as_f < 90
+
+ return false
+ end
+end
diff --git a/src/invidious/jobs/notification_job.cr b/src/invidious/jobs/notification_job.cr
index 2f525e08..b445107b 100644
--- a/src/invidious/jobs/notification_job.cr
+++ b/src/invidious/jobs/notification_job.cr
@@ -1,12 +1,12 @@
class Invidious::Jobs::NotificationJob < Invidious::Jobs::BaseJob
- private getter connection_channel : Channel({Bool, Channel(PQ::Notification)})
+ private getter connection_channel : ::Channel({Bool, ::Channel(PQ::Notification)})
private getter pg_url : URI
def initialize(@connection_channel, @pg_url)
end
def begin
- connections = [] of Channel(PQ::Notification)
+ connections = [] of ::Channel(PQ::Notification)
PG.connect_listen(pg_url, "notifications") { |event| connections.each(&.send(event)) }
diff --git a/src/invidious/jobs/pull_popular_videos_job.cr b/src/invidious/jobs/pull_popular_videos_job.cr
index 7a8ab84e..dc785bae 100644
--- a/src/invidious/jobs/pull_popular_videos_job.cr
+++ b/src/invidious/jobs/pull_popular_videos_job.cr
@@ -1,11 +1,4 @@
class Invidious::Jobs::PullPopularVideosJob < Invidious::Jobs::BaseJob
- QUERY = <<-SQL
- SELECT DISTINCT ON (ucid) *
- FROM channel_videos
- WHERE ucid IN (SELECT channel FROM (SELECT UNNEST(subscriptions) AS channel FROM users) AS d
- GROUP BY channel ORDER BY COUNT(channel) DESC LIMIT 40)
- ORDER BY ucid, published DESC
- SQL
POPULAR_VIDEOS = Atomic.new([] of ChannelVideo)
private getter db : DB::Database
@@ -14,9 +7,9 @@ class Invidious::Jobs::PullPopularVideosJob < Invidious::Jobs::BaseJob
def begin
loop do
- videos = db.query_all(QUERY, as: ChannelVideo)
- .sort_by(&.published)
- .reverse
+ videos = Invidious::Database::ChannelVideos.select_popular_videos
+ .sort_by!(&.published)
+ .reverse!
POPULAR_VIDEOS.set(videos)
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index fbe6d381..80812a63 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -8,12 +8,12 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
max_fibers = CONFIG.channel_threads
lim_fibers = max_fibers
active_fibers = 0
- active_channel = Channel(Bool).new
- backoff = 1.seconds
+ active_channel = ::Channel(Bool).new
+ backoff = 2.minutes
loop do
LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
- db.query("SELECT id FROM channels ORDER BY updated") do |rs|
+ PG_DB.query("SELECT id FROM channels ORDER BY updated") do |rs|
rs.each do
id = rs.read(String)
@@ -30,16 +30,16 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
spawn do
begin
LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
- channel = fetch_channel(id, db, CONFIG.full_refresh)
+ channel = fetch_channel(id, pull_all_videos: CONFIG.full_refresh)
lim_fibers = max_fibers
LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
- db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
+ Invidious::Database::Channels.update_author(id, channel.author)
rescue ex
LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
if ex.message == "Deleted or invalid channel"
- db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
+ Invidious::Database::Channels.update_mark_deleted(id)
else
lim_fibers = 1
LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
@@ -58,8 +58,8 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
end
end
- LOGGER.debug("RefreshChannelsJob: Done, sleeping for one minute")
- sleep 1.minute
+ LOGGER.debug("RefreshChannelsJob: Done, sleeping for #{CONFIG.channel_refresh_interval}")
+ sleep CONFIG.channel_refresh_interval
Fiber.yield
end
end
diff --git a/src/invidious/jobs/refresh_feeds_job.cr b/src/invidious/jobs/refresh_feeds_job.cr
index 926c27fa..4f8130df 100644
--- a/src/invidious/jobs/refresh_feeds_job.cr
+++ b/src/invidious/jobs/refresh_feeds_job.cr
@@ -7,7 +7,7 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
def begin
max_fibers = CONFIG.feed_threads
active_fibers = 0
- active_channel = Channel(Bool).new
+ active_channel = ::Channel(Bool).new
loop do
db.query("SELECT email FROM users WHERE feed_needs_update = true OR feed_needs_update IS NULL") do |rs|
@@ -25,7 +25,7 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
spawn do
begin
# Drop outdated views
- column_array = get_column_array(db, view_name)
+ column_array = Invidious::Database.get_column_array(db, view_name)
ChannelVideo.type_array.each_with_index do |name, i|
if name != column_array[i]?
LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
diff --git a/src/invidious/jobs/statistics_refresh_job.cr b/src/invidious/jobs/statistics_refresh_job.cr
index 6569c0a1..66c91ad5 100644
--- a/src/invidious/jobs/statistics_refresh_job.cr
+++ b/src/invidious/jobs/statistics_refresh_job.cr
@@ -18,6 +18,13 @@ class Invidious::Jobs::StatisticsRefreshJob < Invidious::Jobs::BaseJob
"updatedAt" => Time.utc.to_unix,
"lastChannelRefreshedAt" => 0_i64,
},
+
+ #
+ # "totalRequests" => 0_i64,
+ # "successfulRequests" => 0_i64
+ # "ratio" => 0_i64
+ #
+ "playback" => {} of String => Int64 | Float64,
}
private getter db : DB::Database
@@ -30,7 +37,7 @@ class Invidious::Jobs::StatisticsRefreshJob < Invidious::Jobs::BaseJob
loop do
refresh_stats
- sleep 1.minute
+ sleep 10.minute
Fiber.yield
end
end
@@ -47,12 +54,17 @@ class Invidious::Jobs::StatisticsRefreshJob < Invidious::Jobs::BaseJob
private def refresh_stats
users = STATISTICS.dig("usage", "users").as(Hash(String, Int64))
- users["total"] = db.query_one("SELECT count(*) FROM users", as: Int64)
- users["activeHalfyear"] = db.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '6 months'", as: Int64)
- users["activeMonth"] = db.query_one("SELECT count(*) FROM users WHERE CURRENT_TIMESTAMP - updated < '1 month'", as: Int64)
+
+ users["total"] = Invidious::Database::Statistics.count_users_total
+ users["activeHalfyear"] = Invidious::Database::Statistics.count_users_active_6m
+ users["activeMonth"] = Invidious::Database::Statistics.count_users_active_1m
+
STATISTICS["metadata"] = {
"updatedAt" => Time.utc.to_unix,
- "lastChannelRefreshedAt" => db.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0_i64,
+ "lastChannelRefreshedAt" => Invidious::Database::Statistics.channel_last_update.try &.to_unix || 0_i64,
}
+
+ # Reset playback requests tracker
+ STATISTICS["playback"] = {} of String => Int64 | Float64
end
end
diff --git a/src/invidious/jobs/subscribe_to_feeds_job.cr b/src/invidious/jobs/subscribe_to_feeds_job.cr
index a431a48a..8584fb9c 100644
--- a/src/invidious/jobs/subscribe_to_feeds_job.cr
+++ b/src/invidious/jobs/subscribe_to_feeds_job.cr
@@ -12,7 +12,7 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
end
active_fibers = 0
- active_channel = Channel(Bool).new
+ active_channel = ::Channel(Bool).new
loop do
db.query_all("SELECT id FROM channels WHERE CURRENT_TIMESTAMP - subscribed > interval '4 days' OR subscribed IS NULL") do |rs|
diff --git a/src/invidious/jobs/update_decrypt_function_job.cr b/src/invidious/jobs/update_decrypt_function_job.cr
deleted file mode 100644
index 6fa0ae1b..00000000
--- a/src/invidious/jobs/update_decrypt_function_job.cr
+++ /dev/null
@@ -1,14 +0,0 @@
-class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
- def begin
- loop do
- begin
- DECRYPT_FUNCTION.update_decrypt_function
- rescue ex
- LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
- ensure
- sleep 1.minute
- Fiber.yield
- end
- end
- end
-end
diff --git a/src/invidious/jsonify/api_v1/common.cr b/src/invidious/jsonify/api_v1/common.cr
new file mode 100644
index 00000000..64b06465
--- /dev/null
+++ b/src/invidious/jsonify/api_v1/common.cr
@@ -0,0 +1,18 @@
+require "json"
+
+module Invidious::JSONify::APIv1
+ extend self
+
+ def thumbnails(json : JSON::Builder, id : String)
+ json.array do
+ build_thumbnails(id).each do |thumbnail|
+ json.object do
+ json.field "quality", thumbnail[:name]
+ json.field "url", "#{thumbnail[:host]}/vi/#{id}/#{thumbnail["url"]}.jpg"
+ json.field "width", thumbnail[:width]
+ json.field "height", thumbnail[:height]
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/jsonify/api_v1/video_json.cr b/src/invidious/jsonify/api_v1/video_json.cr
new file mode 100644
index 00000000..08cd533f
--- /dev/null
+++ b/src/invidious/jsonify/api_v1/video_json.cr
@@ -0,0 +1,295 @@
+require "json"
+
+module Invidious::JSONify::APIv1
+ extend self
+
+ def video(video : Video, json : JSON::Builder, *, locale : String?, proxy : Bool = false)
+ json.object do
+ json.field "type", video.video_type
+
+ json.field "title", video.title
+ json.field "videoId", video.id
+
+ json.field "error", video.info["reason"] if video.info["reason"]?
+
+ json.field "videoThumbnails" do
+ self.thumbnails(json, video.id)
+ end
+ json.field "storyboards" do
+ self.storyboards(json, video.id, video.storyboards)
+ end
+
+ json.field "description", video.description
+ json.field "descriptionHtml", video.description_html
+ json.field "published", video.published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(video.published, locale))
+ json.field "keywords", video.keywords
+
+ json.field "viewCount", video.views
+ json.field "likeCount", video.likes
+ json.field "dislikeCount", 0_i64
+
+ json.field "paid", video.paid
+ json.field "premium", video.premium
+ json.field "isFamilyFriendly", video.is_family_friendly
+ json.field "allowedRegions", video.allowed_regions
+ json.field "genre", video.genre
+ json.field "genreUrl", video.genre_url
+
+ json.field "author", video.author
+ json.field "authorId", video.ucid
+ json.field "authorUrl", "/channel/#{video.ucid}"
+ json.field "authorVerified", video.author_verified
+
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", video.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+
+ json.field "subCountText", video.sub_count_text
+
+ json.field "lengthSeconds", video.length_seconds
+ json.field "allowRatings", video.allow_ratings
+ json.field "rating", 0_i64
+ json.field "isListed", video.is_listed
+ json.field "liveNow", video.live_now
+ json.field "isPostLiveDvr", video.post_live_dvr
+ json.field "isUpcoming", video.upcoming?
+
+ if video.premiere_timestamp
+ json.field "premiereTimestamp", video.premiere_timestamp.try &.to_unix
+ end
+
+ if hlsvp = video.hls_manifest_url
+ hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", HOST_URL)
+ json.field "hlsUrl", hlsvp
+ end
+
+ json.field "dashUrl", "#{HOST_URL}/api/manifest/dash/id/#{video.id}"
+
+ json.field "adaptiveFormats" do
+ json.array do
+ video.adaptive_fmts.each do |fmt|
+ json.object do
+ # Only available on regular videos, not livestreams/OTF streams
+ if init_range = fmt["initRange"]?
+ json.field "init", "#{init_range["start"]}-#{init_range["end"]}"
+ end
+ if index_range = fmt["indexRange"]?
+ json.field "index", "#{index_range["start"]}-#{index_range["end"]}"
+ end
+
+ # Not available on MPEG-4 Timed Text (`text/mp4`) streams (livestreams only)
+ json.field "bitrate", fmt["bitrate"].as_i.to_s if fmt["bitrate"]?
+
+ if proxy
+ json.field "url", Invidious::HttpServer::Utils.proxy_video_url(
+ fmt["url"].to_s, absolute: true
+ )
+ else
+ json.field "url", fmt["url"]
+ end
+
+ json.field "itag", fmt["itag"].as_i.to_s
+ json.field "type", fmt["mimeType"]
+ json.field "clen", fmt["contentLength"]? || "-1"
+
+ # Last modified is a unix timestamp with microsecond precision, with the dot omitted.
+ # E.g.: 1638056732(.)141582
+ #
+ # On livestreams, it's not present, so always fall back to the
+ # current unix timestamp (up to millisecond precision) for compatibility.
+ last_modified = fmt["lastModified"]?
+ last_modified ||= "#{Time.utc.to_unix_ms}000"
+ json.field "lmt", last_modified
+
+ json.field "projectionType", fmt["projectionType"]
+
+ height = fmt["height"]?.try &.as_i
+ width = fmt["width"]?.try &.as_i
+
+ fps = fmt["fps"]?.try &.as_i
+
+ if fps
+ json.field "fps", fps
+ end
+
+ if height && width
+ json.field "size", "#{width}x#{height}"
+ json.field "resolution", "#{height}p"
+
+ quality_label = "#{width > height ? height : width}p"
+
+ if fps && fps > 30
+ quality_label += fps.to_s
+ end
+
+ json.field "qualityLabel", quality_label
+ end
+
+ if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
+ json.field "container", fmt_info["ext"]
+ json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
+ end
+
+ # Livestream chunk infos
+ json.field "targetDurationSec", fmt["targetDurationSec"].as_i if fmt.has_key?("targetDurationSec")
+ json.field "maxDvrDurationSec", fmt["maxDvrDurationSec"].as_i if fmt.has_key?("maxDvrDurationSec")
+
+ # Audio-related data
+ json.field "audioQuality", fmt["audioQuality"] if fmt.has_key?("audioQuality")
+ json.field "audioSampleRate", fmt["audioSampleRate"].as_s.to_i if fmt.has_key?("audioSampleRate")
+ json.field "audioChannels", fmt["audioChannels"] if fmt.has_key?("audioChannels")
+
+ # Extra misc stuff
+ json.field "colorInfo", fmt["colorInfo"] if fmt.has_key?("colorInfo")
+ json.field "captionTrack", fmt["captionTrack"] if fmt.has_key?("captionTrack")
+ end
+ end
+ end
+ end
+
+ json.field "formatStreams" do
+ json.array do
+ video.fmt_stream.each do |fmt|
+ json.object do
+ if proxy
+ json.field "url", Invidious::HttpServer::Utils.proxy_video_url(
+ fmt["url"].to_s, absolute: true
+ )
+ else
+ json.field "url", fmt["url"]
+ end
+ json.field "itag", fmt["itag"].as_i.to_s
+ json.field "type", fmt["mimeType"]
+ json.field "quality", fmt["quality"]
+
+ json.field "bitrate", fmt["bitrate"].as_i.to_s if fmt["bitrate"]?
+
+ height = fmt["height"]?.try &.as_i
+ width = fmt["width"]?.try &.as_i
+
+ fps = fmt["fps"]?.try &.as_i
+
+ if fps
+ json.field "fps", fps
+ end
+
+ if height && width
+ json.field "size", "#{width}x#{height}"
+ json.field "resolution", "#{height}p"
+
+ quality_label = "#{width > height ? height : width}p"
+
+ if fps && fps > 30
+ quality_label += fps.to_s
+ end
+
+ json.field "qualityLabel", quality_label
+ end
+
+ if fmt_info = Invidious::Videos::Formats.itag_to_metadata?(fmt["itag"])
+ json.field "container", fmt_info["ext"]
+ json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
+ end
+ end
+ end
+ end
+ end
+
+ json.field "captions" do
+ json.array do
+ video.captions.each do |caption|
+ json.object do
+ json.field "label", caption.name
+ json.field "language_code", caption.language_code
+ json.field "url", "/api/v1/captions/#{video.id}?label=#{URI.encode_www_form(caption.name)}"
+ end
+ end
+ end
+ end
+
+ if !video.music.empty?
+ json.field "musicTracks" do
+ json.array do
+ video.music.each do |music|
+ json.object do
+ json.field "song", music.song
+ json.field "artist", music.artist
+ json.field "album", music.album
+ json.field "license", music.license
+ end
+ end
+ end
+ end
+ end
+
+ json.field "recommendedVideos" do
+ json.array do
+ video.related_videos.each do |rv|
+ if rv["id"]?
+ json.object do
+ json.field "videoId", rv["id"]
+ json.field "title", rv["title"]
+ json.field "videoThumbnails" do
+ self.thumbnails(json, rv["id"])
+ end
+
+ json.field "author", rv["author"]
+ json.field "authorUrl", "/channel/#{rv["ucid"]?}"
+ json.field "authorId", rv["ucid"]?
+ json.field "authorVerified", rv["author_verified"] == "true"
+ if rv["author_thumbnail"]?
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", rv["author_thumbnail"].gsub(/s\d+-/, "s#{quality}-")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+ end
+
+ json.field "lengthSeconds", rv["length_seconds"]?.try &.to_i
+ json.field "viewCountText", rv["short_view_count"]?
+ json.field "viewCount", rv["view_count"]?.try &.empty? ? nil : rv["view_count"].to_i64
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def storyboards(json, id, storyboards)
+ json.array do
+ storyboards.each do |sb|
+ json.object do
+ json.field "url", "/api/v1/storyboards/#{id}?width=#{sb.width}&height=#{sb.height}"
+ json.field "templateUrl", sb.url.to_s
+ json.field "width", sb.width
+ json.field "height", sb.height
+ json.field "count", sb.count
+ json.field "interval", sb.interval
+ json.field "storyboardWidth", sb.columns
+ json.field "storyboardHeight", sb.rows
+ json.field "storyboardCount", sb.images_count
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr
index 55b01174..823ca85b 100644
--- a/src/invidious/mixes.cr
+++ b/src/invidious/mixes.cr
@@ -72,7 +72,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
videos += next_page.videos
end
- videos.uniq! { |video| video.id }
+ videos.uniq!(&.id)
videos = videos.first(50)
return Mix.new({
title: mix_title,
@@ -97,7 +97,7 @@ def template_mix(mix)
<li class="pure-menu-item">
<a href="/watch?v=#{video["videoId"]}&list=#{mix["mixId"]}">
<div class="thumbnail">
- <img class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg">
+ <img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg" alt="" />
<p class="length">#{recode_length_seconds(video["lengthSeconds"].as_i)}</p>
</div>
<p style="width:100%">#{video["title"]}</p>
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index 073a9986..a51e88b4 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -11,7 +11,7 @@ struct PlaylistVideo
property index : Int64
property live_now : Bool
- def to_xml(auto_generated, xml : XML::Builder)
+ def to_xml(xml : XML::Builder)
xml.element("entry") do
xml.element("id") { xml.text "yt:video:#{self.id}" }
xml.element("yt:videoId") { xml.text self.id }
@@ -20,13 +20,8 @@ struct PlaylistVideo
xml.element("link", rel: "alternate", href: "#{HOST_URL}/watch?v=#{self.id}")
xml.element("author") do
- if auto_generated
- xml.element("name") { xml.text self.author }
- xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
- else
- xml.element("name") { xml.text author }
- xml.element("uri") { xml.text "#{HOST_URL}/channel/#{ucid}" }
- end
+ xml.element("name") { xml.text self.author }
+ xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
end
xml.element("content", type: "xhtml") do
@@ -47,18 +42,18 @@ struct PlaylistVideo
end
end
- def to_xml(auto_generated, xml : XML::Builder? = nil)
- if xml
- to_xml(auto_generated, xml)
- else
- XML.build do |json|
- to_xml(auto_generated, xml)
- end
- end
+ def to_xml(_xml : Nil = nil)
+ XML.build { |xml| to_xml(xml) }
end
- def to_json(locale, json : JSON::Builder, index : Int32?)
+ def to_json(locale : String?, json : JSON::Builder)
+ to_json(json)
+ end
+
+ def to_json(json : JSON::Builder, index : Int32? = nil)
json.object do
+ json.field "type", "video"
+
json.field "title", self.title
json.field "videoId", self.id
@@ -67,7 +62,7 @@ struct PlaylistVideo
json.field "authorUrl", "/channel/#{self.ucid}"
json.field "videoThumbnails" do
- generate_thumbnails(json, self.id)
+ Invidious::JSONify::APIv1.thumbnails(json, self.id)
end
if index
@@ -78,17 +73,12 @@ struct PlaylistVideo
end
json.field "lengthSeconds", self.length_seconds
+ json.field "liveNow", self.live_now
end
end
- def to_json(locale, json : JSON::Builder? = nil, index : Int32? = nil)
- if json
- to_json(locale, json, index: index)
- else
- JSON.build do |json|
- to_json(locale, json, index: index)
- end
- end
+ def to_json(_json : Nil, index : Int32? = nil)
+ JSON.build { |json| to_json(json, index: index) }
end
end
@@ -106,8 +96,9 @@ struct Playlist
property views : Int64
property updated : Time
property thumbnail : String?
+ property subtitle : String?
- def to_json(offset, locale, json : JSON::Builder, continuation : String? = nil)
+ def to_json(offset, json : JSON::Builder, video_id : String? = nil)
json.object do
json.field "type", "playlist"
json.field "title", self.title
@@ -117,6 +108,7 @@ struct Playlist
json.field "author", self.author
json.field "authorId", self.ucid
json.field "authorUrl", "/channel/#{self.ucid}"
+ json.field "subtitle", self.subtitle
json.field "authorThumbnails" do
json.array do
@@ -142,22 +134,18 @@ struct Playlist
json.field "videos" do
json.array do
- videos = get_playlist_videos(PG_DB, self, offset: offset, locale: locale, continuation: continuation)
- videos.each_with_index do |video, index|
- video.to_json(locale, json)
+ videos = get_playlist_videos(self, offset: offset, video_id: video_id)
+ videos.each do |video|
+ video.to_json(json)
end
end
end
end
end
- def to_json(offset, locale, json : JSON::Builder? = nil, continuation : String? = nil)
- if json
- to_json(offset, locale, json, continuation: continuation)
- else
- JSON.build do |json|
- to_json(offset, locale, json, continuation: continuation)
- end
+ def to_json(offset, _json : Nil = nil, video_id : String? = nil)
+ JSON.build do |json|
+ to_json(offset, json, video_id: video_id)
end
end
@@ -196,7 +184,7 @@ struct InvidiousPlaylist
end
end
- def to_json(offset, locale, json : JSON::Builder, continuation : String? = nil)
+ def to_json(offset, json : JSON::Builder, video_id : String? = nil)
json.object do
json.field "type", "invidiousPlaylist"
json.field "title", self.title
@@ -217,32 +205,29 @@ struct InvidiousPlaylist
json.field "videos" do
json.array do
- if !offset || offset == 0
- index = PG_DB.query_one?("SELECT index FROM playlist_videos WHERE plid = $1 AND id = $2 LIMIT 1", self.id, continuation, as: Int64)
+ if (!offset || offset == 0) && !video_id.nil?
+ index = Invidious::Database::PlaylistVideos.select_index(self.id, video_id)
offset = self.index.index(index) || 0
end
- videos = get_playlist_videos(PG_DB, self, offset: offset, locale: locale, continuation: continuation)
- videos.each_with_index do |video, index|
- video.to_json(locale, json, offset + index)
+ videos = get_playlist_videos(self, offset: offset, video_id: video_id)
+ videos.each_with_index do |video, idx|
+ video.to_json(json, offset + idx)
end
end
end
end
end
- def to_json(offset, locale, json : JSON::Builder? = nil, continuation : String? = nil)
- if json
- to_json(offset, locale, json, continuation: continuation)
- else
- JSON.build do |json|
- to_json(offset, locale, json, continuation: continuation)
- end
+ def to_json(offset, _json : Nil = nil, video_id : String? = nil)
+ JSON.build do |json|
+ to_json(offset, json, video_id: video_id)
end
end
def thumbnail
- @thumbnail_id ||= PG_DB.query_one?("SELECT id FROM playlist_videos WHERE plid = $1 ORDER BY array_position($2, index) LIMIT 1", self.id, self.index, as: String) || "-----------"
+ # TODO: Get playlist thumbnail from playlist data rather than first video
+ @thumbnail_id ||= Invidious::Database::PlaylistVideos.select_one_id(self.id, self.index) || "-----------"
"/vi/#{@thumbnail_id}/mqdefault.jpg"
end
@@ -259,11 +244,11 @@ struct InvidiousPlaylist
end
def description_html
- HTML.escape(self.description).gsub("\n", "<br>")
+ HTML.escape(self.description)
end
end
-def create_playlist(db, title, privacy, user)
+def create_playlist(title, privacy, user)
plid = "IVPL#{Random::Secure.urlsafe_base64(24)[0, 31]}"
playlist = InvidiousPlaylist.new({
@@ -278,17 +263,14 @@ def create_playlist(db, title, privacy, user)
index: [] of Int64,
})
- playlist_array = playlist.to_a
- args = arg_array(playlist_array)
-
- db.exec("INSERT INTO playlists VALUES (#{args})", args: playlist_array)
+ Invidious::Database::Playlists.insert(playlist)
return playlist
end
-def subscribe_playlist(db, user, playlist)
+def subscribe_playlist(user, playlist)
playlist = InvidiousPlaylist.new({
- title: playlist.title.byte_slice(0, 150),
+ title: playlist.title[..150],
id: playlist.id,
author: user.email,
description: "", # Max 5000 characters
@@ -299,10 +281,7 @@ def subscribe_playlist(db, user, playlist)
index: [] of Int64,
})
- playlist_array = playlist.to_a
- args = arg_array(playlist_array)
-
- db.exec("INSERT INTO playlists VALUES (#{args})", args: playlist_array)
+ Invidious::Database::Playlists.insert(playlist)
return playlist
end
@@ -322,21 +301,19 @@ def produce_playlist_continuation(id, index)
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i, padding: false) }
- data_wrapper = {"1:varint" => request_count, "15:string" => "PT:#{data}"}
- .try { |i| Protodec::Any.cast_json(i) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
object = {
"80226972:embedded" => {
- "2:string" => plid,
- "3:string" => data_wrapper,
+ "2:string" => plid,
+ "3:base64" => {
+ "1:varint" => request_count,
+ "15:string" => "PT:#{data}",
+ "104:embedded" => {"1:0:varint" => 0_i64},
+ },
"35:string" => id,
},
}
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
.try { |i| Protodec::Any.from_json(i) }
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }
@@ -344,41 +321,32 @@ def produce_playlist_continuation(id, index)
return continuation
end
-def get_playlist(db, plid, locale, refresh = true, force_refresh = false)
+def get_playlist(plid : String)
if plid.starts_with? "IV"
- if playlist = db.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
+ if playlist = Invidious::Database::Playlists.select(id: plid)
return playlist
else
- raise InfoException.new("Playlist does not exist.")
+ raise NotFoundException.new("Playlist does not exist.")
end
else
- return fetch_playlist(plid, locale)
+ return fetch_playlist(plid)
end
end
-def fetch_playlist(plid, locale)
+def fetch_playlist(plid : String)
if plid.starts_with? "UC"
plid = "UU#{plid.lchop("UC")}"
end
- response = YT_POOL.client &.get("/playlist?list=#{plid}&hl=en")
- if response.status_code != 200
- if response.headers["location"]?.try &.includes? "/sorry/index"
- raise InfoException.new("Could not extract playlist info. Instance is likely blocked.")
- else
- raise InfoException.new("Not a playlist.")
- end
- end
-
- initial_data = extract_initial_data(response.body)
+ initial_data = YoutubeAPI.browse("VL" + plid, params: "")
- playlist_sidebar_renderer = initial_data["sidebar"]?.try &.["playlistSidebarRenderer"]?.try &.["items"]?
+ playlist_sidebar_renderer = initial_data.dig?("sidebar", "playlistSidebarRenderer", "items")
raise InfoException.new("Could not extract playlistSidebarRenderer.") if !playlist_sidebar_renderer
- playlist_info = playlist_sidebar_renderer[0]["playlistSidebarPrimaryInfoRenderer"]?
+ playlist_info = playlist_sidebar_renderer.dig?(0, "playlistSidebarPrimaryInfoRenderer")
raise InfoException.new("Could not extract playlist info") if !playlist_info
- title = playlist_info["title"]?.try &.["runs"][0]?.try &.["text"]?.try &.as_s || ""
+ title = playlist_info.dig?("title", "runs", 0, "text").try &.as_s || ""
desc_item = playlist_info["description"]?
@@ -388,18 +356,25 @@ def fetch_playlist(plid, locale)
description_html = desc_item.try &.["runs"]?.try &.as_a
.try { |run| content_to_comment_html(run).try &.to_s } || "<p></p>"
- thumbnail = playlist_info["thumbnailRenderer"]?.try &.["playlistVideoThumbnailRenderer"]?
- .try &.["thumbnail"]["thumbnails"][0]["url"]?.try &.as_s
+ thumbnail = playlist_info.dig?(
+ "thumbnailRenderer", "playlistVideoThumbnailRenderer",
+ "thumbnail", "thumbnails", 0, "url"
+ ).try &.as_s
views = 0_i64
updated = Time.utc
video_count = 0
+
+ subtitle = extract_text(initial_data.dig?("header", "playlistHeaderRenderer", "subtitle"))
+
playlist_info["stats"]?.try &.as_a.each do |stat|
text = stat["runs"]?.try &.as_a.map(&.["text"].as_s).join("") || stat["simpleText"]?.try &.as_s
next if !text
if text.includes? "video"
video_count = text.gsub(/\D/, "").to_i? || 0
+ elsif text.includes? "episode"
+ video_count = text.gsub(/\D/, "").to_i? || 0
elsif text.includes? "view"
views = text.gsub(/\D/, "").to_i64? || 0_i64
else
@@ -412,12 +387,15 @@ def fetch_playlist(plid, locale)
author_thumbnail = ""
ucid = ""
else
- author_info = playlist_sidebar_renderer[1]["playlistSidebarSecondaryInfoRenderer"]?.try &.["videoOwner"]["videoOwnerRenderer"]?
+ author_info = playlist_sidebar_renderer[1].dig?(
+ "playlistSidebarSecondaryInfoRenderer", "videoOwner", "videoOwnerRenderer"
+ )
+
raise InfoException.new("Could not extract author info") if !author_info
- author = author_info["title"]["runs"][0]["text"]?.try &.as_s || ""
- author_thumbnail = author_info["thumbnail"]["thumbnails"][0]["url"]?.try &.as_s || ""
- ucid = author_info["title"]["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"]?.try &.as_s || ""
+ author = author_info.dig?("title", "runs", 0, "text").try &.as_s || ""
+ author_thumbnail = author_info.dig?("thumbnail", "thumbnails", 0, "url").try &.as_s || ""
+ ucid = author_info.dig?("title", "runs", 0, "navigationEndpoint", "browseEndpoint", "browseId").try &.as_s || ""
end
return Playlist.new({
@@ -432,36 +410,40 @@ def fetch_playlist(plid, locale)
views: views,
updated: updated,
thumbnail: thumbnail,
+ subtitle: subtitle,
})
end
-def get_playlist_videos(db, playlist, offset, locale = nil, continuation = nil)
- # Show empy playlist if requested page is out of range
+def get_playlist_videos(playlist : InvidiousPlaylist | Playlist, offset : Int32, video_id = nil)
+ # Show empty playlist if requested page is out of range
# (e.g, when a new playlist has been created, offset will be negative)
if offset >= playlist.video_count || offset < 0
return [] of PlaylistVideo
end
if playlist.is_a? InvidiousPlaylist
- db.query_all("SELECT * FROM playlist_videos WHERE plid = $1 ORDER BY array_position($2, index) LIMIT 100 OFFSET $3",
- playlist.id, playlist.index, offset, as: PlaylistVideo)
+ Invidious::Database::PlaylistVideos.select(playlist.id, playlist.index, offset, limit: 100)
else
- if offset >= 100
- # Normalize offset to match youtube's behavior (100 videos chunck per request)
- offset = (offset / 100).to_i64 * 100_i64
+ if video_id
+ initial_data = YoutubeAPI.next({
+ "videoId" => video_id,
+ "playlistId" => playlist.id,
+ })
+ offset = initial_data.dig?("contents", "twoColumnWatchNextResults", "playlist", "playlist", "currentIndex").try &.as_i || offset
+ end
+
+ videos = [] of PlaylistVideo
+ until videos.size >= 200 || videos.size == playlist.video_count || offset >= playlist.video_count
+ # 100 videos per request
ctoken = produce_playlist_continuation(playlist.id, offset)
- initial_data = JSON.parse(request_youtube_api_browse(ctoken)).as_h
- else
- response = YT_POOL.client &.get("/playlist?list=#{playlist.id}&gl=US&hl=en")
- initial_data = extract_initial_data(response.body)
- end
+ initial_data = YoutubeAPI.browse(ctoken)
+ videos += extract_playlist_videos(initial_data)
- if initial_data
- return extract_playlist_videos(initial_data)
- else
- return [] of PlaylistVideo
+ offset += 100
end
+
+ return videos
end
end
@@ -495,7 +477,6 @@ def extract_playlist_videos(initial_data : Hash(String, JSON::Any))
plid = i["navigationEndpoint"]["watchEndpoint"]["playlistId"].as_s
index = i["navigationEndpoint"]["watchEndpoint"]["index"].as_i64
- thumbnail = i["thumbnail"]["thumbnails"][0]["url"].as_s
title = i["title"].try { |t| t["simpleText"]? || t["runs"]?.try &.[0]["text"]? }.try &.as_s || ""
author = i["shortBylineText"]?.try &.["runs"][0]["text"].as_s || ""
ucid = i["shortBylineText"]?.try &.["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"].as_s || ""
@@ -537,10 +518,10 @@ def template_playlist(playlist)
playlist["videos"].as_a.each do |video|
html += <<-END_HTML
- <li class="pure-menu-item">
- <a href="/watch?v=#{video["videoId"]}&list=#{playlist["playlistId"]}">
+ <li class="pure-menu-item" id="#{video["videoId"]}">
+ <a href="/watch?v=#{video["videoId"]}&list=#{playlist["playlistId"]}&index=#{video["index"]}">
<div class="thumbnail">
- <img class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg">
+ <img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg" alt="" />
<p class="length">#{recode_length_seconds(video["lengthSeconds"].as_i)}</p>
</div>
<p style="width:100%">#{video["title"]}</p>
diff --git a/src/invidious/routes/account.cr b/src/invidious/routes/account.cr
new file mode 100644
index 00000000..dd65e7a6
--- /dev/null
+++ b/src/invidious/routes/account.cr
@@ -0,0 +1,354 @@
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Account
+ extend self
+
+ # -------------------
+ # Password update
+ # -------------------
+
+ # Show the password change interface (GET request)
+ def get_change_password(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ csrf_token = generate_response(sid, {":change_password"}, HMAC_KEY)
+
+ templated "user/change_password"
+ end
+
+ # Handle the password change (POST request)
+ def post_change_password(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ return error_template(400, ex)
+ end
+
+ password = env.params.body["password"]?
+ if password.nil? || password.empty?
+ return error_template(401, "Password is a required field")
+ end
+
+ new_passwords = env.params.body.select { |k, _| k.match(/^new_password\[\d+\]$/) }.map { |_, v| v }
+
+ if new_passwords.size <= 1 || new_passwords.uniq.size != 1
+ return error_template(400, "New passwords must match")
+ end
+
+ new_password = new_passwords.uniq[0]
+ if new_password.empty?
+ return error_template(401, "Password cannot be empty")
+ end
+
+ if new_password.bytesize > 55
+ return error_template(400, "Password cannot be longer than 55 characters")
+ end
+
+ if !Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
+ return error_template(401, "Incorrect password")
+ end
+
+ new_password = Crypto::Bcrypt::Password.create(new_password, cost: 10)
+ Invidious::Database::Users.update_password(user, new_password.to_s)
+
+ env.redirect referer
+ end
+
+ # -------------------
+ # Account deletion
+ # -------------------
+
+ # Show the account deletion confirmation prompt (GET request)
+ def get_delete(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ csrf_token = generate_response(sid, {":delete_account"}, HMAC_KEY)
+
+ templated "user/delete_account"
+ end
+
+ # Handle the account deletion (POST request)
+ def post_delete(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ return error_template(400, ex)
+ end
+
+ view_name = "subscriptions_#{sha256(user.email)}"
+ Invidious::Database::Users.delete(user)
+ Invidious::Database::SessionIDs.delete(email: user.email)
+ PG_DB.exec("DROP MATERIALIZED VIEW #{view_name}")
+
+ env.request.cookies.each do |cookie|
+ cookie.expires = Time.utc(1990, 1, 1)
+ env.response.cookies << cookie
+ end
+
+ env.redirect referer
+ end
+
+ # -------------------
+ # Clear history
+ # -------------------
+
+ # Show the watch history deletion confirmation prompt (GET request)
+ def get_clear_history(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ csrf_token = generate_response(sid, {":clear_watch_history"}, HMAC_KEY)
+
+ templated "user/clear_watch_history"
+ end
+
+ # Handle the watch history clearing (POST request)
+ def post_clear_history(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ return error_template(400, ex)
+ end
+
+ Invidious::Database::Users.clear_watch_history(user)
+ env.redirect referer
+ end
+
+ # -------------------
+ # Authorize tokens
+ # -------------------
+
+ # Show the "authorize token?" confirmation prompt (GET request)
+ def get_authorize_token(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect "/login?referer=#{URI.encode_path_segment(env.request.resource)}"
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY)
+
+ scopes = env.params.query["scopes"]?.try &.split(",")
+ scopes ||= [] of String
+
+ callback_url = env.params.query["callback_url"]?
+ if callback_url
+ callback_url = URI.parse(callback_url)
+ end
+
+ expire = env.params.query["expire"]?.try &.to_i?
+
+ templated "user/authorize_token"
+ end
+
+ # Handle token authorization (POST request)
+ def post_authorize_token(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = env.get("user").as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ return error_template(400, ex)
+ end
+
+ scopes = env.params.body.select { |k, _| k.match(/^scopes\[\d+\]$/) }.map { |_, v| v }
+ callback_url = env.params.body["callbackUrl"]?
+ expire = env.params.body["expire"]?.try &.to_i?
+
+ access_token = generate_token(user.email, scopes, expire, HMAC_KEY)
+
+ if callback_url
+ access_token = URI.encode_www_form(access_token)
+ url = URI.parse(callback_url)
+
+ if url.query
+ query = HTTP::Params.parse(url.query.not_nil!)
+ else
+ query = HTTP::Params.new
+ end
+
+ query["token"] = access_token
+ query["username"] = URI.encode_path_segment(user.email)
+ url.query = query.to_s
+
+ env.redirect url.to_s
+ else
+ csrf_token = ""
+ env.set "access_token", access_token
+ templated "user/authorize_token"
+ end
+ end
+
+ # -------------------
+ # Manage tokens
+ # -------------------
+
+ # Show the token manager page (GET request)
+ def token_manager(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env, "/subscription_manager")
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ tokens = Invidious::Database::SessionIDs.select_all(user.email)
+
+ templated "user/token_manager"
+ end
+
+ # -------------------
+ # AJAX for tokens
+ # -------------------
+
+ # Handle internal (non-API) token actions (POST request)
+ def token_ajax(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ redirect = env.params.query["redirect"]?
+ redirect ||= "true"
+ redirect = redirect == "true"
+
+ if !user
+ if redirect
+ return env.redirect referer
+ else
+ return error_json(403, "No such user")
+ end
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ if redirect
+ return error_template(400, ex)
+ else
+ return error_json(400, ex)
+ end
+ end
+
+ if env.params.query["action_revoke_token"]?
+ action = "action_revoke_token"
+ else
+ return env.redirect referer
+ end
+
+ session = env.params.query["session"]?
+ session ||= ""
+
+ case action
+ when .starts_with? "action_revoke_token"
+ Invidious::Database::SessionIDs.delete(sid: session, email: user.email)
+ else
+ return error_json(400, "Unsupported action #{action}")
+ end
+
+ if redirect
+ return env.redirect referer
+ else
+ env.response.content_type = "application/json"
+ return "{}"
+ end
+ end
+end
diff --git a/src/invidious/routes/api/manifest.cr b/src/invidious/routes/api/manifest.cr
new file mode 100644
index 00000000..d89e752c
--- /dev/null
+++ b/src/invidious/routes/api/manifest.cr
@@ -0,0 +1,241 @@
+module Invidious::Routes::API::Manifest
+ # /api/manifest/dash/id/:id
+ def self.get_dash_video_id(env)
+ env.response.headers.add("Access-Control-Allow-Origin", "*")
+ env.response.content_type = "application/dash+xml"
+
+ local = env.params.query["local"]?.try &.== "true"
+ id = env.params.url["id"]
+ region = env.params.query["region"]?
+
+ # Since some implementations create playlists based on resolution regardless of different codecs,
+ # we can opt to only add a source to a representation if it has a unique height within that representation
+ unique_res = env.params.query["unique_res"]?.try { |q| (q == "true" || q == "1").to_unsafe }
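+ # e.g. with ?unique_res=1, two 1080p streams that only differ by codec collapse into
+ # a single 1080p Representation, so quality menus list each height once (illustrative).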
+
+ begin
+ video = get_video(id, region: region)
+ rescue ex : NotFoundException
+ haltf env, status_code: 404
+ rescue ex
+ haltf env, status_code: 403
+ end
+
+ if dashmpd = video.dash_manifest_url
+ response = YT_POOL.client &.get(URI.parse(dashmpd).request_target)
+
+ if response.status_code != 200
+ haltf env, status_code: response.status_code
+ end
+
+ manifest = response.body
+
+ manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|
+ url = baseurl.lchop("<BaseURL>")
+ url = url.rchop("</BaseURL>")
+
+ if local
+ uri = URI.parse(url)
+ url = "#{HOST_URL}#{uri.request_target}host/#{uri.host}/"
+ end
+
+ "<BaseURL>#{url}</BaseURL>"
+ end
+
+ return manifest
+ end
+
+ adaptive_fmts = video.adaptive_fmts
+
+ if local
+ adaptive_fmts.each do |fmt|
+ fmt["url"] = JSON::Any.new("#{HOST_URL}#{URI.parse(fmt["url"].as_s).request_target}")
+ end
+ end
+
+ audio_streams = video.audio_streams.sort_by { |stream| {stream["bitrate"].as_i} }.reverse!
+ video_streams = video.video_streams.sort_by { |stream| {stream["width"].as_i, stream["fps"].as_i} }.reverse!
+
+ manifest = XML.build(indent: " ", encoding: "UTF-8") do |xml|
+ xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",
+ "profiles": "urn:mpeg:dash:profile:full:2011", minBufferTime: "PT1.5S", type: "static",
+ mediaPresentationDuration: "PT#{video.length_seconds}S") do
+ xml.element("Period") do
+ i = 0
+
+ {"audio/mp4"}.each do |mime_type|
+ mime_streams = audio_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
+ next if mime_streams.empty?
+
+ mime_streams.each do |fmt|
+ # OTF streams aren't supported yet (See https://github.com/TeamNewPipe/NewPipe/issues/2415)
+ next if !(fmt.has_key?("indexRange") && fmt.has_key?("initRange"))
+
+ # Different representations of the same audio should be grouped into one AdaptationSet.
+ # However, most players don't support auto quality switching, so we have to trick them
+ # into providing a quality selector.
+ # See https://github.com/iv-org/invidious/issues/3074 for more details.
+ xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, label: fmt["bitrate"].to_s + "k") do
+ codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
+ bandwidth = fmt["bitrate"].as_i
+ itag = fmt["itag"].as_i
+ url = fmt["url"].as_s
+
+ xml.element("Role", schemeIdUri: "urn:mpeg:dash:role:2011", value: i == 0 ? "main" : "alternate")
+
+ xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
+ xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011",
+ value: "2")
+ xml.element("BaseURL") { xml.text url }
+ xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
+ xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
+ end
+ end
+ end
+ i += 1
+ end
+ end
+
+ potential_heights = {4320, 2160, 1440, 1080, 720, 480, 360, 240, 144}
+
+ {"video/mp4"}.each do |mime_type|
+ mime_streams = video_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
+ next if mime_streams.empty?
+
+ heights = [] of Int32
+ xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, scanType: "progressive") do
+ mime_streams.each do |fmt|
+ # OTF streams aren't supported yet (See https://github.com/TeamNewPipe/NewPipe/issues/2415)
+ next if !(fmt.has_key?("indexRange") && fmt.has_key?("initRange"))
+
+ codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
+ bandwidth = fmt["bitrate"].as_i
+ itag = fmt["itag"].as_i
+ url = fmt["url"].as_s
+ width = fmt["width"].as_i
+ height = fmt["height"].as_i
+
+ # Resolutions reported by YouTube player (may not accurately reflect source)
+ height = potential_heights.min_by { |x| (height - x).abs }
+ next if unique_res && heights.includes? height
+ heights << height
+
+ xml.element("Representation", id: itag, codecs: codecs, width: width, height: height,
+ startWithSAP: "1", maxPlayoutRate: "1",
+ bandwidth: bandwidth, frameRate: fmt["fps"]) do
+ xml.element("BaseURL") { xml.text url }
+ xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
+ xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
+ end
+ end
+ end
+ end
+
+ i += 1
+ end
+ end
+ end
+ end
+
+ return manifest
+ end
+
+ # /api/manifest/dash/id/videoplayback
+ def self.get_dash_video_playback(env)
+ env.response.headers.delete("Content-Type")
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+ env.redirect "/videoplayback?#{env.params.query}"
+ end
+
+ # /api/manifest/dash/id/videoplayback/*
+ def self.get_dash_video_playback_greedy(env)
+ env.response.headers.delete("Content-Type")
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+ env.redirect env.request.path.lchop("/api/manifest/dash/id")
+ end
+
+ # /api/manifest/dash/id/videoplayback && /api/manifest/dash/id/videoplayback/*
+ def self.options_dash_video_playback(env)
+ env.response.headers.delete("Content-Type")
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+ env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
+ env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
+ end
+
+ # /api/manifest/hls_playlist/*
+ def self.get_hls_playlist(env)
+ response = YT_POOL.client &.get(env.request.path)
+
+ if response.status_code != 200
+ haltf env, status_code: response.status_code
+ end
+
+ local = env.params.query["local"]?.try &.== "true"
+
+ env.response.content_type = "application/x-mpegURL"
+ env.response.headers.add("Access-Control-Allow-Origin", "*")
+
+ manifest = response.body
+
+ if local
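+ # Rewrite absolute *.c.youtube.com segment URLs so media is fetched through this
+ # instance. Illustrative example: a path-style URL such as
+ #   https://r4---sn-example1.c.youtube.com/videoplayback/id/abc/itag/96/hls_chunk_host/r4---sn-example1.c.youtube.com
+ # is split into key/value path pairs and re-emitted as
+ #   <instance>/videoplayback?id=abc&itag=96&hls_chunk_host=r4---sn-example1.c.youtube.com&fvip=4&local=true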
+ manifest = manifest.gsub(/^https:\/\/\w+---.{11}\.c\.youtube\.com[^\n]*/m) do |match|
+ path = URI.parse(match).path
+
+ path = path.lchop("/videoplayback/")
+ path = path.rchop("/")
+
+ path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
+ mimetype = mimetype.split("/")
+ mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
+ end
+
+ path = path.split("/")
+
+ raw_params = {} of String => Array(String)
+ path.each_slice(2) do |pair|
+ key, value = pair
+ value = URI.decode_www_form(value)
+
+ if raw_params[key]?
+ raw_params[key] << value
+ else
+ raw_params[key] = [value]
+ end
+ end
+
+ raw_params = HTTP::Params.new(raw_params)
+ if fvip = raw_params["hls_chunk_host"].match(/r(?<fvip>\d+)---/)
+ raw_params["fvip"] = fvip["fvip"]
+ end
+
+ raw_params["local"] = "true"
+
+ "#{HOST_URL}/videoplayback?#{raw_params}"
+ end
+ end
+
+ manifest
+ end
+
+ # /api/manifest/hls_variant/*
+ def self.get_hls_variant(env)
+ response = YT_POOL.client &.get(env.request.path)
+
+ if response.status_code != 200
+ haltf env, status_code: response.status_code
+ end
+
+ local = env.params.query["local"]?.try &.== "true"
+
+ env.response.content_type = "application/x-mpegURL"
+ env.response.headers.add("Access-Control-Allow-Origin", "*")
+
+ manifest = response.body
+
+ if local
+ manifest = manifest.gsub("https://www.youtube.com", HOST_URL)
+ manifest = manifest.gsub("index.m3u8", "index.m3u8?local=true")
+ end
+
+ manifest
+ end
+end
diff --git a/src/invidious/routes/api/v1/authenticated.cr b/src/invidious/routes/api/v1/authenticated.cr
new file mode 100644
index 00000000..a35d2f2b
--- /dev/null
+++ b/src/invidious/routes/api/v1/authenticated.cr
@@ -0,0 +1,490 @@
+module Invidious::Routes::API::V1::Authenticated
+ # The notification APIs cannot be extracted yet!
+ # They require the *local* notifications constant defined in invidious.cr
+ #
+ # def self.notifications(env)
+ # env.response.content_type = "text/event-stream"
+
+ # topics = env.params.body["topics"]?.try &.split(",").uniq.first(1000)
+ # topics ||= [] of String
+
+ # create_notification_stream(env, topics, connection_channel)
+ # end
+
+ def self.get_preferences(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+ user.preferences.to_json
+ end
+
+ def self.set_preferences(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ begin
+ user.preferences = Preferences.from_json(env.request.body || "{}")
+ rescue
+ end
+
+ Invidious::Database::Users.update_preferences(user)
+
+ env.response.status_code = 204
+ end
+
+ def self.export_invidious(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ return Invidious::User::Export.to_invidious(user)
+ end
+
+ def self.import_invidious(env)
+ user = env.get("user").as(User)
+
+ begin
+ if body = env.request.body
+ body = env.request.body.not_nil!.gets_to_end
+ else
+ body = "{}"
+ end
+ Invidious::User::Import.from_invidious(user, body)
+ rescue
+ end
+
+ env.response.status_code = 204
+ end
+
+ def self.get_history(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ page = env.params.query["page"]?.try &.to_i?.try &.clamp(0, Int32::MAX)
+ page ||= 1
+
+ max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
+ max_results ||= user.preferences.max_results
+ max_results ||= CONFIG.default_user_preferences.max_results
+
+ start_index = (page - 1) * max_results
+ if user.watched[start_index]?
+ watched = user.watched.reverse[start_index, max_results]
+ end
+ watched ||= [] of String
+
+ return watched.to_json
+ end
+
+ def self.mark_watched(env)
+ user = env.get("user").as(User)
+
+ if !user.preferences.watch_history
+ return error_json(409, "Watch history is disabled in preferences.")
+ end
+
+ id = env.params.url["id"]
+ if !id.match(/^[a-zA-Z0-9_-]{11}$/)
+ return error_json(400, "Invalid video id.")
+ end
+
+ Invidious::Database::Users.mark_watched(user, id)
+ env.response.status_code = 204
+ end
+
+ def self.mark_unwatched(env)
+ user = env.get("user").as(User)
+
+ if !user.preferences.watch_history
+ return error_json(409, "Watch history is disabled in preferences.")
+ end
+
+ id = env.params.url["id"]
+ if !id.match(/^[a-zA-Z0-9_-]{11}$/)
+ return error_json(400, "Invalid video id.")
+ end
+
+ Invidious::Database::Users.mark_unwatched(user, id)
+ env.response.status_code = 204
+ end
+
+ def self.clear_history(env)
+ user = env.get("user").as(User)
+
+ Invidious::Database::Users.clear_watch_history(user)
+ env.response.status_code = 204
+ end
+
+ def self.feed(env)
+ env.response.content_type = "application/json"
+
+ user = env.get("user").as(User)
+ locale = env.get("preferences").as(Preferences).locale
+
+ max_results = env.params.query["max_results"]?.try &.to_i?
+ max_results ||= user.preferences.max_results
+ max_results ||= CONFIG.default_user_preferences.max_results
+
+ page = env.params.query["page"]?.try &.to_i?
+ page ||= 1
+
+ videos, notifications = get_subscription_feed(user, max_results, page)
+
+ JSON.build do |json|
+ json.object do
+ json.field "notifications" do
+ json.array do
+ notifications.each do |video|
+ video.to_json(locale, json)
+ end
+ end
+ end
+
+ json.field "videos" do
+ json.array do
+ videos.each do |video|
+ video.to_json(locale, json)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ def self.get_subscriptions(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ subscriptions = Invidious::Database::Channels.select(user.subscriptions)
+
+ JSON.build do |json|
+ json.array do
+ subscriptions.each do |subscription|
+ json.object do
+ json.field "author", subscription.author
+ json.field "authorId", subscription.id
+ end
+ end
+ end
+ end
+ end
+
+ def self.subscribe_channel(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ ucid = env.params.url["ucid"]
+
+ if !user.subscriptions.includes? ucid
+ get_channel(ucid)
+ Invidious::Database::Users.subscribe_channel(user, ucid)
+ end
+
+ env.response.status_code = 204
+ end
+
+ def self.unsubscribe_channel(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ ucid = env.params.url["ucid"]
+
+ Invidious::Database::Users.unsubscribe_channel(user, ucid)
+
+ env.response.status_code = 204
+ end
+
+ def self.list_playlists(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ playlists = Invidious::Database::Playlists.select_all(author: user.email)
+
+ JSON.build do |json|
+ json.array do
+ playlists.each do |playlist|
+ playlist.to_json(0, json)
+ end
+ end
+ end
+ end
+
+ def self.create_playlist(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ title = env.params.json["title"]?.try &.as(String).delete("<>").byte_slice(0, 150)
+ if !title
+ return error_json(400, "Invalid title.")
+ end
+
+ privacy = env.params.json["privacy"]?.try { |p| PlaylistPrivacy.parse(p.as(String).downcase) }
+ if !privacy
+ return error_json(400, "Invalid privacy setting.")
+ end
+
+ if Invidious::Database::Playlists.count_owned_by(user.email) >= 100
+ return error_json(400, "User cannot have more than 100 playlists.")
+ end
+
+ playlist = create_playlist(title, privacy, user)
+ env.response.headers["Location"] = "#{HOST_URL}/api/v1/auth/playlists/#{playlist.id}"
+ env.response.status_code = 201
+ {
+ "title" => title,
+ "playlistId" => playlist.id,
+ }.to_json
+ end
+
+ def self.update_playlist_attribute(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ plid = env.params.url["plid"]?
+ if !plid || plid.empty?
+ return error_json(400, "A playlist ID is required")
+ end
+
+ playlist = Invidious::Database::Playlists.select(id: plid)
+ if !playlist || playlist.author != user.email && playlist.privacy.private?
+ return error_json(404, "Playlist does not exist.")
+ end
+
+ if playlist.author != user.email
+ return error_json(403, "Invalid user")
+ end
+
+ title = env.params.json["title"].try &.as(String).delete("<>").byte_slice(0, 150) || playlist.title
+ privacy = env.params.json["privacy"]?.try { |p| PlaylistPrivacy.parse(p.as(String).downcase) } || playlist.privacy
+ description = env.params.json["description"]?.try &.as(String).delete("\r") || playlist.description
+
+ if title != playlist.title ||
+ privacy != playlist.privacy ||
+ description != playlist.description
+ updated = Time.utc
+ else
+ updated = playlist.updated
+ end
+
+ Invidious::Database::Playlists.update(plid, title, privacy, description, updated)
+
+ env.response.status_code = 204
+ end
+
+ def self.delete_playlist(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ plid = env.params.url["plid"]
+
+ playlist = Invidious::Database::Playlists.select(id: plid)
+ if !playlist || playlist.author != user.email && playlist.privacy.private?
+ return error_json(404, "Playlist does not exist.")
+ end
+
+ if playlist.author != user.email
+ return error_json(403, "Invalid user")
+ end
+
+ Invidious::Database::Playlists.delete(plid)
+
+ env.response.status_code = 204
+ end
+
+ def self.insert_video_into_playlist(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ plid = env.params.url["plid"]
+
+ playlist = Invidious::Database::Playlists.select(id: plid)
+ if !playlist || playlist.author != user.email && playlist.privacy.private?
+ return error_json(404, "Playlist does not exist.")
+ end
+
+ if playlist.author != user.email
+ return error_json(403, "Invalid user")
+ end
+
+ if playlist.index.size >= CONFIG.playlist_length_limit
+ return error_json(400, "Playlist cannot have more than #{CONFIG.playlist_length_limit} videos")
+ end
+
+ video_id = env.params.json["videoId"].try &.as(String)
+ if !video_id
+ return error_json(403, "Invalid videoId")
+ end
+
+ begin
+ video = get_video(video_id)
+ rescue ex : NotFoundException
+ return error_json(404, ex)
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ playlist_video = PlaylistVideo.new({
+ title: video.title,
+ id: video.id,
+ author: video.author,
+ ucid: video.ucid,
+ length_seconds: video.length_seconds,
+ published: video.published,
+ plid: plid,
+ live_now: video.live_now,
+ index: Random::Secure.rand(0_i64..Int64::MAX),
+ })
+
+ Invidious::Database::PlaylistVideos.insert(playlist_video)
+ Invidious::Database::Playlists.update_video_added(plid, playlist_video.index)
+
+ env.response.headers["Location"] = "#{HOST_URL}/api/v1/auth/playlists/#{plid}/videos/#{playlist_video.index.to_u64.to_s(16).upcase}"
+ env.response.status_code = 201
+
+ JSON.build do |json|
+ playlist_video.to_json(json, index: playlist.index.size)
+ end
+ end
+
+ def self.delete_video_in_playlist(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+
+ plid = env.params.url["plid"]
+ index = env.params.url["index"].to_i64(16)
+
+ playlist = Invidious::Database::Playlists.select(id: plid)
+ if !playlist || playlist.author != user.email && playlist.privacy.private?
+ return error_json(404, "Playlist does not exist.")
+ end
+
+ if playlist.author != user.email
+ return error_json(403, "Invalid user")
+ end
+
+ if !playlist.index.includes? index
+ return error_json(404, "Playlist does not contain index")
+ end
+
+ Invidious::Database::PlaylistVideos.delete(index)
+ Invidious::Database::Playlists.update_video_removed(plid, index)
+
+ env.response.status_code = 204
+ end
+
+ # Invidious::Routing.patch "/api/v1/auth/playlists/:plid/videos/:index"
+ # def modify_playlist_at(env)
+ # TODO
+ # end
+
+ def self.get_tokens(env)
+ env.response.content_type = "application/json"
+ user = env.get("user").as(User)
+ scopes = env.get("scopes").as(Array(String))
+
+ tokens = Invidious::Database::SessionIDs.select_all(user.email)
+
+ JSON.build do |json|
+ json.array do
+ tokens.each do |token|
+ json.object do
+ json.field "session", token[:session]
+ json.field "issued", token[:issued].to_unix
+ end
+ end
+ end
+ end
+ end
+
+ def self.register_token(env)
+ user = env.get("user").as(User)
+ locale = env.get("preferences").as(Preferences).locale
+
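+ # Token registration accepts either a form-encoded body
+ # (scopes[0]=...&callbackUrl=...&expire=...) or a JSON body, e.g. (illustrative values):
+ #   {"scopes": ["GET:tokens"], "callbackUrl": "https://example.com/callback", "expire": 1700000000}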
+ case env.request.headers["Content-Type"]?
+ when "application/x-www-form-urlencoded"
+ scopes = env.params.body.select { |k, _| k.match(/^scopes\[\d+\]$/) }.map { |_, v| v }
+ callback_url = env.params.body["callbackUrl"]?
+ expire = env.params.body["expire"]?.try &.to_i?
+ when "application/json"
+ scopes = env.params.json["scopes"].as(Array).map(&.as_s)
+ callback_url = env.params.json["callbackUrl"]?.try &.as(String)
+ expire = env.params.json["expire"]?.try &.as(Int64)
+ else
+ return error_json(400, "Invalid or missing header 'Content-Type'")
+ end
+
+ if callback_url && callback_url.empty?
+ callback_url = nil
+ end
+
+ if callback_url
+ callback_url = URI.parse(callback_url)
+ end
+
+ if sid = env.get?("sid").try &.as(String)
+ env.response.content_type = "text/html"
+
+ csrf_token = generate_response(sid, {":authorize_token"}, HMAC_KEY, use_nonce: true)
+ return templated "user/authorize_token"
+ else
+ env.response.content_type = "application/json"
+
+ superset_scopes = env.get("scopes").as(Array(String))
+
+ authorized_scopes = [] of String
+ scopes.each do |scope|
+ if scopes_include_scope(superset_scopes, scope)
+ authorized_scopes << scope
+ end
+ end
+
+ access_token = generate_token(user.email, authorized_scopes, expire, HMAC_KEY)
+
+ if callback_url
+ access_token = URI.encode_www_form(access_token)
+
+ if query = callback_url.query
+ query = HTTP::Params.parse(query.not_nil!)
+ else
+ query = HTTP::Params.new
+ end
+
+ query["token"] = access_token
+ callback_url.query = query.to_s
+
+ env.redirect callback_url.to_s
+ else
+ access_token
+ end
+ end
+ end
+
+ def self.unregister_token(env)
+ env.response.content_type = "application/json"
+
+ user = env.get("user").as(User)
+ scopes = env.get("scopes").as(Array(String))
+
+ session = env.params.json["session"]?.try &.as(String)
+ session ||= env.get("session").as(String)
+
+ # Allow tokens to revoke other tokens with correct scope
+ if session == env.get("session").as(String)
+ Invidious::Database::SessionIDs.delete(sid: session)
+ elsif scopes_include_scope(scopes, "GET:tokens")
+ Invidious::Database::SessionIDs.delete(sid: session)
+ else
+ return error_json(400, "Cannot revoke session #{session}")
+ end
+
+ env.response.status_code = 204
+ end
+
+ def self.notifications(env)
+ env.response.content_type = "text/event-stream"
+
+ raw_topics = env.params.body["topics"]? || env.params.query["topics"]?
+ topics = raw_topics.try &.split(",").uniq.first(1000)
+ topics ||= [] of String
+
+ create_notification_stream(env, topics, CONNECTION_CHANNEL)
+ end
+end
diff --git a/src/invidious/routes/api/v1/channels.cr b/src/invidious/routes/api/v1/channels.cr
new file mode 100644
index 00000000..588bbc2a
--- /dev/null
+++ b/src/invidious/routes/api/v1/channels.cr
@@ -0,0 +1,516 @@
+module Invidious::Routes::API::V1::Channels
+ # Macro to avoid duplicating some code below.
+ # It sets the `channel` variable, or returns an error response on exception.
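+ # Typical call site (as used in the routes below): make sure `ucid` and `locale`
+ # are in scope, declare `channel = nil` to satisfy the compiler, then invoke
+ # `get_channel()` to populate `channel` or return early on error.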
+ private macro get_channel
+ begin
+ channel = get_about_info(ucid, locale)
+ rescue ex : ChannelRedirect
+ env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
+ return error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
+ rescue ex : NotFoundException
+ return error_json(404, ex)
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ def self.home(env)
+ locale = env.get("preferences").as(Preferences).locale
+ ucid = env.params.url["ucid"]
+
+ env.response.content_type = "application/json"
+
+ # Use the private macro defined above.
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ # Retrieve "sort by" setting from URL parameters
+ sort_by = env.params.query["sort_by"]?.try &.downcase || "newest"
+
+ if channel.is_age_gated
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UULF"))
+ videos = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # Playlist doesn't exist.
+ videos = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ begin
+ videos, _ = Channel::Tabs.get_videos(channel, sort_by: sort_by)
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ JSON.build do |json|
+ # TODO: Refactor into `to_json` for InvidiousChannel
+ json.object do
+ json.field "author", channel.author
+ json.field "authorId", channel.ucid
+ json.field "authorUrl", channel.author_url
+
+ json.field "authorBanners" do
+ json.array do
+ if channel.banner
+ qualities = {
+ {width: 2560, height: 424},
+ {width: 2120, height: 351},
+ {width: 1060, height: 175},
+ }
+ qualities.each do |quality|
+ json.object do
+ json.field "url", channel.banner.not_nil!.gsub("=w1060-", "=w#{quality[:width]}-")
+ json.field "width", quality[:width]
+ json.field "height", quality[:height]
+ end
+ end
+
+ json.object do
+ json.field "url", channel.banner.not_nil!.split("=w1060-")[0]
+ json.field "width", 512
+ json.field "height", 288
+ end
+ end
+ end
+ end
+
+ json.field "authorThumbnails" do
+ json.array do
+ qualities = {32, 48, 76, 100, 176, 512}
+
+ qualities.each do |quality|
+ json.object do
+ json.field "url", channel.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
+ json.field "width", quality
+ json.field "height", quality
+ end
+ end
+ end
+ end
+
+ json.field "subCount", channel.sub_count
+ json.field "totalViews", channel.total_views
+ json.field "joined", channel.joined.to_unix
+
+ json.field "autoGenerated", channel.auto_generated
+ json.field "ageGated", channel.is_age_gated
+ json.field "isFamilyFriendly", channel.is_family_friendly
+ json.field "description", html_to_content(channel.description_html)
+ json.field "descriptionHtml", channel.description_html
+
+ json.field "allowedRegions", channel.allowed_regions
+ json.field "tabs", channel.tabs
+ json.field "tags", channel.tags
+ json.field "authorVerified", channel.verified
+
+ json.field "latestVideos" do
+ json.array do
+ videos.each do |video|
+ video.to_json(locale, json)
+ end
+ end
+ end
+
+ json.field "relatedChannels" do
+ json.array do
+ # Fetch related channels
+ begin
+ related_channels, _ = fetch_related_channels(channel)
+ rescue ex
+ related_channels = [] of SearchChannel
+ end
+
+ related_channels.each do |related_channel|
+ related_channel.to_json(locale, json)
+ end
+ end
+ end # relatedChannels
+
+ end
+ end
+ end
+
+ def self.latest(env)
+ # Remove parameters that could affect this endpoint's behavior
+ env.params.query.delete("sort_by") if env.params.query.has_key?("sort_by")
+ env.params.query.delete("continuation") if env.params.query.has_key?("continuation")
+
+ return self.videos(env)
+ end
+
+ def self.videos(env)
+ locale = env.get("preferences").as(Preferences).locale
+ ucid = env.params.url["ucid"]
+
+ env.response.content_type = "application/json"
+
+ # Use the private macro defined above.
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ # Retrieve some URL parameters
+ sort_by = env.params.query["sort_by"]?.try &.downcase || "newest"
+ continuation = env.params.query["continuation"]?
+
+ if channel.is_age_gated
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UULF"))
+ videos = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # Playlist doesn't exist.
+ videos = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ begin
+ videos, next_continuation = Channel::Tabs.get_60_videos(
+ channel, continuation: continuation, sort_by: sort_by
+ )
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ return JSON.build do |json|
+ json.object do
+ json.field "videos" do
+ json.array do
+ videos.each &.to_json(locale, json)
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.shorts(env)
+ locale = env.get("preferences").as(Preferences).locale
+ ucid = env.params.url["ucid"]
+
+ env.response.content_type = "application/json"
+
+ # Use the private macro defined above.
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ # Retrieve sort criteria and continuation from URL parameters
+ sort_by = env.params.query["sort_by"]?.try &.downcase || "newest"
+ continuation = env.params.query["continuation"]?
+
+ if channel.is_age_gated
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UUSH"))
+ videos = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # Playlist doesn't exist.
+ videos = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ begin
+ videos, next_continuation = Channel::Tabs.get_shorts(
+ channel, continuation: continuation, sort_by: sort_by
+ )
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ return JSON.build do |json|
+ json.object do
+ json.field "videos" do
+ json.array do
+ videos.each &.to_json(locale, json)
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.streams(env)
+ locale = env.get("preferences").as(Preferences).locale
+ ucid = env.params.url["ucid"]
+
+ env.response.content_type = "application/json"
+
+ # Use the private macro defined above.
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ # Retrieve sort criteria and continuation from URL parameters
+ sort_by = env.params.query["sort_by"]?.try &.downcase || "newest"
+ continuation = env.params.query["continuation"]?
+
+ if channel.is_age_gated
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UULV"))
+ videos = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # Playlist doesn't exist.
+ videos = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ begin
+ videos, next_continuation = Channel::Tabs.get_60_livestreams(
+ channel, continuation: continuation, sort_by: sort_by
+ )
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ return JSON.build do |json|
+ json.object do
+ json.field "videos" do
+ json.array do
+ videos.each &.to_json(locale, json)
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.playlists(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ ucid = env.params.url["ucid"]
+ continuation = env.params.query["continuation"]?
+ sort_by = env.params.query["sort"]?.try &.downcase ||
+ env.params.query["sort_by"]?.try &.downcase ||
+ "last"
+
+ # Use the macro defined above
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ items, next_continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
+
+ JSON.build do |json|
+ json.object do
+ json.field "playlists" do
+ json.array do
+ items.each do |item|
+ item.to_json(locale, json) if item.is_a?(SearchPlaylist)
+ end
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.podcasts(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ ucid = env.params.url["ucid"]
+ continuation = env.params.query["continuation"]?
+
+ # Use the macro defined above
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ items, next_continuation = fetch_channel_podcasts(channel.ucid, channel.author, continuation)
+
+ JSON.build do |json|
+ json.object do
+ json.field "playlists" do
+ json.array do
+ items.each do |item|
+ item.to_json(locale, json) if item.is_a?(SearchPlaylist)
+ end
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.releases(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ ucid = env.params.url["ucid"]
+ continuation = env.params.query["continuation"]?
+
+ # Use the macro defined above
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ items, next_continuation = fetch_channel_releases(channel.ucid, channel.author, continuation)
+
+ JSON.build do |json|
+ json.object do
+ json.field "playlists" do
+ json.array do
+ items.each do |item|
+ item.to_json(locale, json) if item.is_a?(SearchPlaylist)
+ end
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.community(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ ucid = env.params.url["ucid"]
+
+ thin_mode = env.params.query["thin_mode"]?
+ thin_mode = thin_mode == "true"
+
+ format = env.params.query["format"]?
+ format ||= "json"
+
+ continuation = env.params.query["continuation"]?
+ # sort_by = env.params.query["sort_by"]?.try &.downcase
+
+ begin
+ fetch_channel_community(ucid, continuation, locale, format, thin_mode)
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ def self.post(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+ id = env.params.url["id"].to_s
+ ucid = env.params.query["ucid"]?
+
+ thin_mode = env.params.query["thin_mode"]?
+ thin_mode = thin_mode == "true"
+
+ format = env.params.query["format"]?
+ format ||= "json"
+
+ if ucid.nil?
+ response = YoutubeAPI.resolve_url("https://www.youtube.com/post/#{id}")
+ return error_json(400, "Invalid post ID") if response["error"]?
+ ucid = response.dig("endpoint", "browseEndpoint", "browseId").as_s
+ else
+ ucid = ucid.to_s
+ end
+
+ begin
+ fetch_channel_community_post(ucid, id, locale, format, thin_mode)
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ def self.post_comments(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ id = env.params.url["id"]
+
+ thin_mode = env.params.query["thin_mode"]?
+ thin_mode = thin_mode == "true"
+
+ format = env.params.query["format"]?
+ format ||= "json"
+
+ continuation = env.params.query["continuation"]?
+
+ case continuation
+ when nil, ""
+ ucid = env.params.query["ucid"]
+ comments = Comments.fetch_community_post_comments(ucid, id)
+ else
+ comments = YoutubeAPI.browse(continuation: continuation)
+ end
+ return Comments.parse_youtube(id, comments, format, locale, thin_mode, is_post: true)
+ end
+
+ def self.channels(env)
+ locale = env.get("preferences").as(Preferences).locale
+ ucid = env.params.url["ucid"]
+
+ env.response.content_type = "application/json"
+
+ # Use the macro defined above
+ channel = nil # Make the compiler happy
+ get_channel()
+
+ continuation = env.params.query["continuation"]?
+
+ begin
+ items, next_continuation = fetch_related_channels(channel, continuation)
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ JSON.build do |json|
+ json.object do
+ json.field "relatedChannels" do
+ json.array do
+ items.each &.to_json(locale, json)
+ end
+ end
+
+ json.field "continuation", next_continuation if next_continuation
+ end
+ end
+ end
+
+ def self.search(env)
+ locale = env.get("preferences").as(Preferences).locale
+ region = env.params.query["region"]?
+
+ env.response.content_type = "application/json"
+
+ query = Invidious::Search::Query.new(env.params.query, :channel, region)
+
+ # Required because we can't (yet) pass multiple parameters to the
+ # `Search::Query` initializer (in this case, a URL segment)
+ query.channel = env.params.url["ucid"]
+
+ begin
+ search_results = query.process
+ rescue ex
+ return error_json(400, ex)
+ end
+
+ JSON.build do |json|
+ json.array do
+ search_results.each do |item|
+ item.to_json(locale, json)
+ end
+ end
+ end
+ end
+
+ # 301 redirect from the legacy /api/v1/channels/comments/:ucid and
+ # /api/v1/channels/:ucid/comments routes to the new
+ # /api/v1/channels/:ucid/community equivalent.
+ def self.channel_comments_redirect(env)
+ env.response.content_type = "application/json"
+ ucid = env.params.url["ucid"]
+
+ env.response.headers["Location"] = "/api/v1/channels/#{ucid}/community?#{env.params.query}"
+ env.response.status_code = 301
+ return
+ end
+end
diff --git a/src/invidious/routes/api/v1/feeds.cr b/src/invidious/routes/api/v1/feeds.cr
new file mode 100644
index 00000000..fea2993c
--- /dev/null
+++ b/src/invidious/routes/api/v1/feeds.cr
@@ -0,0 +1,45 @@
+module Invidious::Routes::API::V1::Feeds
+ def self.trending(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ region = env.params.query["region"]?
+ trending_type = env.params.query["type"]?
+
+ begin
+ trending, plid = fetch_trending(trending_type, region, locale)
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ videos = JSON.build do |json|
+ json.array do
+ trending.each do |video|
+ video.to_json(locale, json)
+ end
+ end
+ end
+
+ videos
+ end
+
+ def self.popular(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ if !CONFIG.popular_enabled
+ error_message = {"error" => "Administrator has disabled this endpoint."}.to_json
+ haltf env, 403, error_message
+ end
+
+ JSON.build do |json|
+ json.array do
+ popular_videos.each do |video|
+ video.to_json(locale, json)
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/routes/api/v1/misc.cr b/src/invidious/routes/api/v1/misc.cr
new file mode 100644
index 00000000..093669fe
--- /dev/null
+++ b/src/invidious/routes/api/v1/misc.cr
@@ -0,0 +1,203 @@
+module Invidious::Routes::API::V1::Misc
+ # Stats API endpoint for Invidious
+ def self.stats(env)
+ env.response.content_type = "application/json"
+
+ if !CONFIG.statistics_enabled
+ return {"software" => SOFTWARE}.to_json
+ else
+ # Calculate playback success rate
+ if (tracker = Invidious::Jobs::StatisticsRefreshJob::STATISTICS["playback"]?)
+ tracker = tracker.as(Hash(String, Int64 | Float64))
+
+ if !tracker.empty?
+ total_requests = tracker["totalRequests"]
+ success_count = tracker["successfulRequests"]
+
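+ # e.g. 950 successful requests out of 1000 total yields a ratio of 0.95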
+ if total_requests.zero?
+ tracker["ratio"] = 1_i64
+ else
+ tracker["ratio"] = (success_count / (total_requests)).round(2)
+ end
+ end
+ end
+
+ return Invidious::Jobs::StatisticsRefreshJob::STATISTICS.to_json
+ end
+ end
+
+ # APIv1 currently uses the same logic for both
+ # user playlists and Invidious playlists. This means that we can't
+ # reasonably split them yet. This should be addressed in APIv2
+ def self.get_playlist(env : HTTP::Server::Context)
+ env.response.content_type = "application/json"
+ plid = env.params.url["plid"]
+
+ offset = env.params.query["index"]?.try &.to_i?
+ offset ||= env.params.query["page"]?.try &.to_i?.try { |page| (page - 1) * 100 }
+ offset ||= 0
+
+ video_id = env.params.query["continuation"]?
+
+ format = env.params.query["format"]?
+ format ||= "json"
+
+ if plid.starts_with? "RD"
+ return env.redirect "/api/v1/mixes/#{plid}"
+ end
+
+ begin
+ playlist = get_playlist(plid)
+ rescue ex : InfoException
+ return error_json(404, ex)
+ rescue ex
+ return error_json(404, "Playlist does not exist.")
+ end
+
+ user = env.get?("user").try &.as(User)
+ if !playlist || playlist.privacy.private? && playlist.author != user.try &.email
+ return error_json(404, "Playlist does not exist.")
+ end
+
+ # Include in the playlist response a maximum of 20 videos located before the offset
+ if offset > 0
+ lookback = offset < 50 ? offset : 50
+ response = playlist.to_json(offset - lookback)
+ json_response = JSON.parse(response)
+ else
+ # Unless the continuation really corresponds to offset 0, this branch is expensive;
+ # it happens when the offset is not set.
+ # First we find the actual offset, and then we look back from it.
+ # This shouldn't happen often, though.
+
+ lookback = 0
+ response = playlist.to_json(offset, video_id: video_id)
+ json_response = JSON.parse(response)
+
+ if json_response["videos"].as_a.empty?
+ json_response = JSON.parse(response)
+ elsif json_response["videos"].as_a[0]["index"] != offset
+ offset = json_response["videos"].as_a[0]["index"].as_i
+ lookback = offset < 50 ? offset : 50
+ response = playlist.to_json(offset - lookback)
+ json_response = JSON.parse(response)
+ end
+ end
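+ # Worked example (illustrative numbers): for a requested index of 125, the lookback is
+ # capped at 50, so serialization starts at index 75 and the requested entry is still included.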
+
+ if format == "html"
+ playlist_html = template_playlist(json_response)
+ index, next_video = json_response["videos"].as_a.skip(1 + lookback).select { |video| !video["author"].as_s.empty? }[0]?.try { |v| {v["index"], v["videoId"]} } || {nil, nil}
+
+ response = {
+ "playlistHtml" => playlist_html,
+ "index" => index,
+ "nextVideo" => next_video,
+ }.to_json
+ end
+
+ response
+ end
+
+ def self.mixes(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ rdid = env.params.url["rdid"]
+
+ continuation = env.params.query["continuation"]?
+ continuation ||= rdid.lchop("RD")[0, 11]
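+ # Mix IDs are typically "RD" followed by the seed video ID, so the default
+ # continuation is that 11-character ID (e.g. "RDdQw4w9WgXcQ" -> "dQw4w9WgXcQ").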
+
+ format = env.params.query["format"]?
+ format ||= "json"
+
+ begin
+ mix = fetch_mix(rdid, continuation, locale: locale)
+
+ if !rdid.ends_with? continuation
+ mix = fetch_mix(rdid, mix.videos[1].id)
+ index = mix.videos.index(mix.videos.select { |video| video.id == continuation }[0]?)
+ end
+
+ mix.videos = mix.videos[index..-1]
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ response = JSON.build do |json|
+ json.object do
+ json.field "title", mix.title
+ json.field "mixId", mix.id
+
+ json.field "videos" do
+ json.array do
+ mix.videos.each do |video|
+ json.object do
+ json.field "title", video.title
+ json.field "videoId", video.id
+ json.field "author", video.author
+
+ json.field "authorId", video.ucid
+ json.field "authorUrl", "/channel/#{video.ucid}"
+
+ json.field "videoThumbnails" do
+ json.array do
+ Invidious::JSONify::APIv1.thumbnails(json, video.id)
+ end
+ end
+
+ json.field "index", video.index
+ json.field "lengthSeconds", video.length_seconds
+ end
+ end
+ end
+ end
+ end
+ end
+
+ if format == "html"
+ response = JSON.parse(response)
+ playlist_html = template_mix(response)
+ next_video = response["videos"].as_a.select { |video| !video["author"].as_s.empty? }[0]?.try &.["videoId"]
+
+ response = {
+ "playlistHtml" => playlist_html,
+ "nextVideo" => next_video,
+ }.to_json
+ end
+
+ response
+ end
+
+ # Resolve channel and clip URLs, and return the UCID
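+ # e.g. resolving "https://www.youtube.com/@SomeHandle" (illustrative) yields a
+ # browseEndpoint whose browseId is the channel's UCID, while watch and playlist
+ # URLs resolve to the corresponding watchEndpoint / browseEndpoint fields.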
+ def self.resolve_url(env)
+ env.response.content_type = "application/json"
+ url = env.params.query["url"]?
+
+ return error_json(400, "Missing URL to resolve") if !url
+
+ begin
+ resolved_url = YoutubeAPI.resolve_url(url.as(String))
+ endpoint = resolved_url["endpoint"]
+ page_type = endpoint.dig?("commandMetadata", "webCommandMetadata", "webPageType").try &.as_s || ""
+ if page_type == "WEB_PAGE_TYPE_UNKNOWN"
+ return error_json(400, "Unknown url")
+ end
+
+ sub_endpoint = endpoint["watchEndpoint"]? || endpoint["browseEndpoint"]? || endpoint
+ params = sub_endpoint.try &.dig?("params")
+ rescue ex
+ return error_json(500, ex)
+ end
+ JSON.build do |json|
+ json.object do
+ json.field "ucid", sub_endpoint["browseId"].as_s if sub_endpoint["browseId"]?
+ json.field "videoId", sub_endpoint["videoId"].as_s if sub_endpoint["videoId"]?
+ json.field "playlistId", sub_endpoint["playlistId"].as_s if sub_endpoint["playlistId"]?
+ json.field "startTimeSeconds", sub_endpoint["startTimeSeconds"].as_i if sub_endpoint["startTimeSeconds"]?
+ json.field "params", params.try &.as_s
+ json.field "pageType", page_type
+ end
+ end
+ end
+end
diff --git a/src/invidious/routes/api/v1/search.cr b/src/invidious/routes/api/v1/search.cr
new file mode 100644
index 00000000..59a30745
--- /dev/null
+++ b/src/invidious/routes/api/v1/search.cr
@@ -0,0 +1,87 @@
+module Invidious::Routes::API::V1::Search
+ def self.search(env)
+ locale = env.get("preferences").as(Preferences).locale
+ region = env.params.query["region"]?
+
+ env.response.content_type = "application/json"
+
+ query = Invidious::Search::Query.new(env.params.query, :regular, region)
+
+ begin
+ search_results = query.process
+ rescue ex
+ return error_json(400, ex)
+ end
+
+ JSON.build do |json|
+ json.array do
+ search_results.each do |item|
+ item.to_json(locale, json)
+ end
+ end
+ end
+ end
+
+ def self.search_suggestions(env)
+ preferences = env.get("preferences").as(Preferences)
+ region = env.params.query["region"]? || preferences.region
+
+ env.response.content_type = "application/json"
+
+ query = env.params.query["q"]? || ""
+
+ begin
+ client = make_client(URI.parse("https://suggestqueries-clients6.youtube.com"), force_youtube_headers: true)
+ url = "/complete/search?client=youtube&hl=en&gl=#{region}&q=#{URI.encode_www_form(query)}&gs_ri=youtube&ds=yt"
+
+ response = client.get(url).body
+ client.close
+
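+ # The endpoint returns a JSONP-style payload (roughly `window.google.ac.h([...])`),
+ # so [19..-2] strips that wrapper before parsing the inner JSON array.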
+ body = JSON.parse(response[19..-2]).as_a
+ suggestions = body[1].as_a[0..-2]
+
+ JSON.build do |json|
+ json.object do
+ json.field "query", body[0].as_s
+ json.field "suggestions" do
+ json.array do
+ suggestions.each do |suggestion|
+ json.string suggestion[0].as_s
+ end
+ end
+ end
+ end
+ end
+ rescue ex
+ return error_json(500, ex)
+ end
+ end
+
+ def self.hashtag(env)
+ hashtag = env.params.url["hashtag"]
+
+ page = env.params.query["page"]?.try &.to_i? || 1
+
+ locale = env.get("preferences").as(Preferences).locale
+ region = env.params.query["region"]?
+ env.response.content_type = "application/json"
+
+ begin
+ results = Invidious::Hashtag.fetch(hashtag, page, region)
+ rescue ex
+ return error_json(400, ex)
+ end
+
+ JSON.build do |json|
+ json.object do
+ json.field "results" do
+ json.array do
+ results.each do |item|
+ item.to_json(locale, json)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/routes/api/v1/videos.cr b/src/invidious/routes/api/v1/videos.cr
new file mode 100644
index 00000000..368304ac
--- /dev/null
+++ b/src/invidious/routes/api/v1/videos.cr
@@ -0,0 +1,432 @@
+require "html"
+
+module Invidious::Routes::API::V1::Videos
+ def self.videos(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ id = env.params.url["id"]
+ region = env.params.query["region"]?
+ proxy = {"1", "true"}.any? &.== env.params.query["local"]?
+
+ begin
+ video = get_video(id, region: region)
+ rescue ex : NotFoundException
+ return error_json(404, ex)
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ return JSON.build do |json|
+ Invidious::JSONify::APIv1.video(video, json, locale: locale, proxy: proxy)
+ end
+ end
+
+ def self.captions(env)
+ env.response.content_type = "application/json"
+
+ id = env.params.url["id"]
+ region = env.params.query["region"]? || env.params.body["region"]?
+
+ if id.nil? || id.size != 11 || !id.matches?(/^[\w-]+$/)
+ return error_json(400, "Invalid video ID")
+ end
+
+ # See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
+ # It is possible to use `/api/timedtext?type=list&v=#{id}` and
+ # `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
+ # but this does not provide links for auto-generated captions.
+ #
+ # In future this should be investigated as an alternative, since it does not require
+ # getting video info.
+
+ begin
+ video = get_video(id, region: region)
+ rescue ex : NotFoundException
+ haltf env, 404
+ rescue ex
+ haltf env, 500
+ end
+
+ captions = video.captions
+
+ label = env.params.query["label"]?
+ lang = env.params.query["lang"]?
+ tlang = env.params.query["tlang"]?
+
+ if !label && !lang
+ response = JSON.build do |json|
+ json.object do
+ json.field "captions" do
+ json.array do
+ captions.each do |caption|
+ json.object do
+ json.field "label", caption.name
+ json.field "languageCode", caption.language_code
+ json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name)}"
+ end
+ end
+ end
+ end
+ end
+ end
+
+ return response
+ end
+
+ env.response.content_type = "text/vtt; charset=UTF-8"
+
+ if lang
+ caption = captions.select(&.language_code.== lang)
+ else
+ caption = captions.select(&.name.== label)
+ end
+
+ if caption.empty?
+ haltf env, 404
+ else
+ caption = caption[0]
+ end
+
+ if CONFIG.use_innertube_for_captions
+ params = Invidious::Videos::Transcript.generate_param(id, caption.language_code, caption.auto_generated)
+
+ transcript = Invidious::Videos::Transcript.from_raw(
+ YoutubeAPI.get_transcript(params),
+ caption.language_code,
+ caption.auto_generated
+ )
+
+ webvtt = transcript.to_vtt
+ else
+ # Timedtext API handling
+ url = URI.parse("#{caption.base_url}&tlang=#{tlang}").request_target
+
+ # Auto-generated captions often have cues that aren't aligned properly with the video,
+ # as well as some other markup that makes it cumbersome, so we try to fix that here
+ if caption.name.includes? "auto-generated"
+ caption_xml = YT_POOL.client &.get(url).body
+
+ settings_field = {
+ "Kind" => "captions",
+ "Language" => "#{tlang || caption.language_code}",
+ }
+
+ if caption_xml.starts_with?("<?xml")
+ webvtt = caption.timedtext_to_vtt(caption_xml, tlang)
+ else
+ caption_xml = XML.parse(caption_xml)
+
+ webvtt = WebVTT.build(settings_field) do |builder|
+ caption_nodes = caption_xml.xpath_nodes("//transcript/text")
+ caption_nodes.each_with_index do |node, i|
+ start_time = node["start"].to_f.seconds
+ duration = node["dur"]?.try &.to_f.seconds
+ duration ||= start_time
+
+ if caption_nodes.size > i + 1
+ end_time = caption_nodes[i + 1]["start"].to_f.seconds
+ else
+ end_time = start_time + duration
+ end
+
+ text = HTML.unescape(node.content)
+ text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
+ text = text.gsub(/<\/font>/, "")
+ if md = text.match(/(?<name>.*) : (?<text>.*)/)
+ text = "<v #{md["name"]}>#{md["text"]}</v>"
+ end
+
+ builder.cue(start_time, end_time, text)
+ end
+ end
+ end
+ else
+ uri = URI.parse(url)
+ query_params = uri.query_params
+ query_params["fmt"] = "vtt"
+ uri.query_params = query_params
+ webvtt = YT_POOL.client &.get(uri.request_target).body
+
+ if webvtt.starts_with?("<?xml")
+ webvtt = caption.timedtext_to_vtt(webvtt)
+ else
+ # Some captions have "align:[start/end]" and "position:[num]%"
+ # attributes. Those are causing issues with VideoJS, which is unable
+ # to properly align the captions on the video, so we remove them.
+ #
+ # See: https://github.com/iv-org/invidious/issues/2391
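+ #
+ # Illustrative example (not taken from a real caption file):
+ #   "00:00:01.000 --> 00:00:04.000 align:start position:0%"
+ # becomes
+ #   "00:00:01.000 --> 00:00:04.000"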
+ webvtt = webvtt.gsub(/([0-9:.]{12} --> [0-9:.]{12}).+/, "\\1")
+ end
+ end
+ end
+
+ if title = env.params.query["title"]?
+ # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
+ env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.encode_www_form(title)}\"; filename*=UTF-8''#{URI.encode_www_form(title)}"
+ end
+
+ webvtt
+ end
+
+ # Fetches YouTube storyboards
+ #
+ # Storyboards are sprite sheets containing x * y preview
+ # thumbnails for individual scenes in a video.
+ # See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
+ def self.storyboards(env)
+ env.response.content_type = "application/json"
+
+ id = env.params.url["id"]
+ region = env.params.query["region"]?
+
+ begin
+ video = get_video(id, region: region)
+ rescue ex : NotFoundException
+ haltf env, 404
+ rescue ex
+ haltf env, 500
+ end
+
+ width = env.params.query["width"]?.try &.to_i
+ height = env.params.query["height"]?.try &.to_i
+
+ if !width && !height
+ response = JSON.build do |json|
+ json.object do
+ json.field "storyboards" do
+ Invidious::JSONify::APIv1.storyboards(json, id, video.storyboards)
+ end
+ end
+ end
+
+ return response
+ end
+
+ env.response.content_type = "text/vtt"
+
+ # Select a storyboard matching the user's provided width/height
+ storyboard = video.storyboards.select { |x| x.width == width || x.height == height }
+ haltf env, 404 if storyboard.empty?
+
+ # Alias variable, to make the code below easier to read
+ sb = storyboard[0]
+
+ # Some base URL segments that we'll use to craft the final URLs
+ work_url = sb.proxied_url.dup
+ template_path = sb.proxied_url.path
+
+ # Initialize cue timing variables
+ # NOTE: videojs-vtt-thumbnails gets lost when the cue times don't overlap
+ # (i.e. if cue[n]'s end time is 1:06:25.000, cue[n+1]'s start time should be 1:06:25.000)
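+ # For example, with a (hypothetical) 10-second interval the cues run
+ # 0:00.000 -> 0:10.000, 0:10.000 -> 0:20.000, and so on.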
+ time_delta = sb.interval.milliseconds
+ start_time = 0.milliseconds
+ end_time = time_delta
+
+ # Build a VTT file for VideoJS-vtt plugin
+ vtt_file = WebVTT.build do |vtt|
+ sb.images_count.times do |i|
+ # Replace the variable component part of the path
+ work_url.path = template_path.sub("$M", i)
+
+ sb.rows.times do |j|
+ sb.columns.times do |k|
+ # The URL fragment represents the offset of the thumbnail inside the storyboard image
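+ # e.g. with (hypothetical) 160x90 thumbnails, row 1, column 2 yields
+ #   #xywh=320,90,158,90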
+ work_url.fragment = "xywh=#{sb.width * k},#{sb.height * j},#{sb.width - 2},#{sb.height}"
+
+ vtt.cue(start_time, end_time, work_url.to_s)
+
+ start_time += time_delta
+ end_time += time_delta
+ end
+ end
+ end
+ end
+
+ # videojs-vtt-thumbnails is not compliant with the VTT specification: it
+ # doesn't unescape HTML entities, so we have to do that here.
+ # TODO: remove this when we migrate to VideoJS 8
+ return HTML.unescape(vtt_file)
+ end
+
+ def self.annotations(env)
+ env.response.content_type = "text/xml"
+
+ id = env.params.url["id"]
+ source = env.params.query["source"]?
+ source ||= "archive"
+
+ if !id.match(/[a-zA-Z0-9_-]{11}/)
+ haltf env, 400
+ end
+
+ annotations = ""
+
+ case source
+ when "archive"
+ if CONFIG.cache_annotations && (cached_annotation = Invidious::Database::Annotations.select(id))
+ annotations = cached_annotation.annotations
+ else
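+ # The archive buckets are keyed by the position of the video ID's first
+ # character in CHARS_SAFE, zero-padded to two digits.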
+ index = CHARS_SAFE.index!(id[0]).to_s.rjust(2, '0')
+
+ # IA doesn't handle leading hyphens,
+ # so we use https://archive.org/details/youtubeannotations_64
+ if index == "62"
+ index = "64"
+ id = id.sub(/^-/, 'A')
+ end
+
+ file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")
+
+ location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))
+
+ if !location.headers["Location"]?
+ env.response.status_code = location.status_code
+ end
+
+ response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))
+
+ if response.body.empty?
+ haltf env, 404
+ end
+
+ if response.status_code != 200
+ haltf env, response.status_code
+ end
+
+ annotations = response.body
+
+ cache_annotation(id, annotations)
+ end
+ else # "youtube"
+ response = YT_POOL.client &.get("/annotations_invideo?video_id=#{id}")
+
+ if response.status_code != 200
+ haltf env, response.status_code
+ end
+
+ annotations = response.body
+ end
+
+ etag = sha256(annotations)[0, 16]
+ if env.request.headers["If-None-Match"]?.try &.== etag
+ haltf env, 304
+ else
+ env.response.headers["ETag"] = etag
+ annotations
+ end
+ end
+
+ def self.comments(env)
+ locale = env.get("preferences").as(Preferences).locale
+ region = env.params.query["region"]?
+
+ env.response.content_type = "application/json"
+
+ id = env.params.url["id"]
+
+ source = env.params.query["source"]?
+ source ||= "youtube"
+
+ thin_mode = env.params.query["thin_mode"]?
+ thin_mode = thin_mode == "true"
+
+ format = env.params.query["format"]?
+ format ||= "json"
+
+ action = env.params.query["action"]?
+ action ||= "action_get_comments"
+
+ continuation = env.params.query["continuation"]?
+ sort_by = env.params.query["sort_by"]?.try &.downcase
+
+ if source == "youtube"
+ sort_by ||= "top"
+
+ begin
+ comments = Comments.fetch_youtube(id, continuation, format, locale, thin_mode, region, sort_by: sort_by)
+ rescue ex : NotFoundException
+ return error_json(404, ex)
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ return comments
+ elsif source == "reddit"
+ sort_by ||= "confidence"
+
+ begin
+ comments, reddit_thread = Comments.fetch_reddit(id, sort_by: sort_by)
+ rescue ex
+ comments = nil
+ reddit_thread = nil
+ end
+
+ if !reddit_thread || !comments
+ return error_json(404, "No reddit threads found")
+ end
+
+ if format == "json"
+ reddit_thread = JSON.parse(reddit_thread.to_json).as_h
+ reddit_thread["comments"] = JSON.parse(comments.to_json)
+
+ return reddit_thread.to_json
+ else
+ content_html = Frontend::Comments.template_reddit(comments, locale)
+ content_html = Comments.fill_links(content_html, "https", "www.reddit.com")
+ content_html = Comments.replace_links(content_html)
+ response = {
+ "title" => reddit_thread.title,
+ "permalink" => reddit_thread.permalink,
+ "contentHtml" => content_html,
+ }
+
+ return response.to_json
+ end
+ end
+ end
+
+ def self.clips(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.content_type = "application/json"
+
+ clip_id = env.params.url["id"]
+ region = env.params.query["region"]?
+ proxy = {"1", "true"}.any? &.== env.params.query["local"]?
+
+ response = YoutubeAPI.resolve_url("https://www.youtube.com/clip/#{clip_id}")
+ return error_json(400, "Invalid clip ID") if response["error"]?
+
+ video_id = response.dig?("endpoint", "watchEndpoint", "videoId").try &.as_s
+ return error_json(400, "Invalid clip ID") if video_id.nil?
+
+ start_time = nil
+ end_time = nil
+ clip_title = nil
+
+ if params = response.dig?("endpoint", "watchEndpoint", "params").try &.as_s
+ start_time, end_time, clip_title = parse_clip_parameters(params)
+ end
+
+ begin
+ video = get_video(video_id, region: region)
+ rescue ex : NotFoundException
+ return error_json(404, ex)
+ rescue ex
+ return error_json(500, ex)
+ end
+
+ return JSON.build do |json|
+ json.object do
+ json.field "startTime", start_time
+ json.field "endTime", end_time
+ json.field "clipTitle", clip_title
+ json.field "video" do
+ Invidious::JSONify::APIv1.video(video, json, locale: locale, proxy: proxy)
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/routes/base_route.cr b/src/invidious/routes/base_route.cr
deleted file mode 100644
index 07c6f15b..00000000
--- a/src/invidious/routes/base_route.cr
+++ /dev/null
@@ -1,2 +0,0 @@
-abstract class Invidious::Routes::BaseRoute
-end
diff --git a/src/invidious/routes/before_all.cr b/src/invidious/routes/before_all.cr
new file mode 100644
index 00000000..5695dee9
--- /dev/null
+++ b/src/invidious/routes/before_all.cr
@@ -0,0 +1,126 @@
+module Invidious::Routes::BeforeAll
+ def self.handle(env)
+ preferences = Preferences.from_json("{}")
+
+ begin
+ if prefs_cookie = env.request.cookies["PREFS"]?
+ preferences = Preferences.from_json(URI.decode_www_form(prefs_cookie.value))
+ else
+ if language_header = env.request.headers["Accept-Language"]?
+ if language = ANG.language_negotiator.best(language_header, LOCALES.keys)
+ preferences.locale = language.header
+ end
+ end
+ end
+ rescue
+ preferences = Preferences.from_json("{}")
+ end
+
+ env.set "preferences", preferences
+ env.response.headers["X-XSS-Protection"] = "1; mode=block"
+ env.response.headers["X-Content-Type-Options"] = "nosniff"
+
+ # Allow media resources to be loaded from Google servers
+ # TODO: check if *.youtube.com can be removed
+ if CONFIG.disabled?("local") || !preferences.local
+ extra_media_csp = " https://*.googlevideo.com:443 https://*.youtube.com:443"
+ else
+ extra_media_csp = ""
+ end
+
+ # Only allow the pages at /embed/* to be embedded
+ if env.request.resource.starts_with?("/embed")
+ frame_ancestors = "'self' file: http: https:"
+ else
+ frame_ancestors = "'none'"
+ end
+
+ # TODO: Remove style-src's 'unsafe-inline'; this requires removing all
+ # inline styles (<style> [..] </style>, style=" [..] ")
+ env.response.headers["Content-Security-Policy"] = {
+ "default-src 'none'",
+ "script-src 'self'",
+ "style-src 'self' 'unsafe-inline'",
+ "img-src 'self' data:",
+ "font-src 'self' data:",
+ "connect-src 'self'",
+ "manifest-src 'self'",
+ "media-src 'self' blob:" + extra_media_csp,
+ "child-src 'self' blob:",
+ "frame-src 'self'",
+ "frame-ancestors " + frame_ancestors,
+ }.join("; ")
+
+ env.response.headers["Referrer-Policy"] = "same-origin"
+
+ # Ask Chromium-based browsers to disable FLoC
+ # See: https://blog.runcloud.io/google-floc/
+ env.response.headers["Permissions-Policy"] = "interest-cohort=()"
+
+ if (Kemal.config.ssl || CONFIG.https_only) && CONFIG.hsts
+ env.response.headers["Strict-Transport-Security"] = "max-age=31536000; includeSubDomains; preload"
+ end
+
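+ # Proxied media and static asset routes don't need the session and
+ # preference handling below, so skip it for them.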
+ return if {
+ "/sb/",
+ "/vi/",
+ "/s_p/",
+ "/yts/",
+ "/ggpht/",
+ "/api/manifest/",
+ "/videoplayback",
+ "/latest_version",
+ "/download",
+ }.any? { |r| env.request.resource.starts_with? r }
+
+ if env.request.cookies.has_key? "SID"
+ sid = env.request.cookies["SID"].value
+
+ if sid.starts_with? "v1:"
+ raise "Cannot use token as SID"
+ end
+
+ if email = Database::SessionIDs.select_email(sid)
+ user = Database::Users.select!(email: email)
+ csrf_token = generate_response(sid, {
+ ":authorize_token",
+ ":playlist_ajax",
+ ":signout",
+ ":subscription_ajax",
+ ":token_ajax",
+ ":watch_ajax",
+ }, HMAC_KEY, 1.week)
+
+ preferences = user.preferences
+ env.set "preferences", preferences
+
+ env.set "sid", sid
+ env.set "csrf_token", csrf_token
+ env.set "user", user
+ end
+ end
+
+ dark_mode = convert_theme(env.params.query["dark_mode"]?) || preferences.dark_mode.to_s
+ thin_mode = env.params.query["thin_mode"]? || preferences.thin_mode.to_s
+ thin_mode = thin_mode == "true"
+ locale = env.params.query["hl"]? || preferences.locale
+
+ preferences.dark_mode = dark_mode
+ preferences.thin_mode = thin_mode
+ preferences.locale = locale
+ env.set "preferences", preferences
+
+ current_page = env.request.path
+ if env.request.query
+ query = HTTP::Params.parse(env.request.query.not_nil!)
+
+ if query["referer"]?
+ query["referer"] = get_referer(env, "/")
+ end
+
+ current_page += "?#{query}"
+ end
+
+ env.set "current_page", URI.encode_www_form(current_page)
+ end
+end
diff --git a/src/invidious/routes/channels.cr b/src/invidious/routes/channels.cr
new file mode 100644
index 00000000..7d634cbb
--- /dev/null
+++ b/src/invidious/routes/channels.cr
@@ -0,0 +1,423 @@
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Channels
+ # Redirection for unsupported routes ("tabs")
+ def self.redirect_home(env)
+ ucid = env.params.url["ucid"]
+ return env.redirect "/channel/#{URI.encode_www_form(ucid)}"
+ end
+
+ def self.home(env)
+ self.videos(env)
+ end
+
+ def self.videos(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ sort_by = env.params.query["sort_by"]?.try &.downcase
+
+ if channel.auto_generated
+ sort_by ||= "last"
+ sort_options = {"last", "oldest", "newest"}
+
+ items, next_continuation = fetch_channel_playlists(
+ channel.ucid, channel.author, continuation, sort_by
+ )
+
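+ # De-duplicate items by title (falling back to author for items without a title)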
+ items.uniq! do |item|
+ if item.responds_to?(:title)
+ item.title
+ elsif item.responds_to?(:author)
+ item.author
+ end
+ end
+ items = items.select(SearchPlaylist)
+ items.each(&.author = "")
+ else
+ # Fetch items and continuation token
+ if channel.is_age_gated
+ sort_by = ""
+ sort_options = [] of String
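+ # For age-gated channels, fetch videos via the corresponding
+ # auto-generated playlist (UC... -> UULF...)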
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UULF"))
+ items = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # The playlist doesn't exist.
+ items = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ sort_by ||= "newest"
+ sort_options = {"newest", "oldest", "popular"}
+
+ items, next_continuation = Channel::Tabs.get_60_videos(
+ channel, continuation: continuation, sort_by: sort_by
+ )
+ end
+ end
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Videos
+ templated "channel"
+ end
+
+ def self.shorts(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ if !channel.tabs.includes? "shorts"
+ return env.redirect "/channel/#{channel.ucid}"
+ end
+
+ if channel.is_age_gated
+ sort_by = ""
+ sort_options = [] of String
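+ # Shorts for age-gated channels come from the auto-generated UUSH playlist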
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UUSH"))
+ items = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # The playlist doesn't exist.
+ items = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ sort_by = env.params.query["sort_by"]?.try &.downcase || "newest"
+ sort_options = {"newest", "oldest", "popular"}
+
+ # Fetch items and continuation token
+ items, next_continuation = Channel::Tabs.get_shorts(
+ channel, continuation: continuation, sort_by: sort_by
+ )
+ end
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Shorts
+ templated "channel"
+ end
+
+ def self.streams(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ if !channel.tabs.includes? "streams"
+ return env.redirect "/channel/#{channel.ucid}"
+ end
+
+ if channel.is_age_gated
+ sort_by = ""
+ sort_options = [] of String
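+ # Livestreams for age-gated channels come from the auto-generated UULV playlist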
+ begin
+ playlist = get_playlist(channel.ucid.sub("UC", "UULV"))
+ items = get_playlist_videos(playlist, offset: 0)
+ rescue ex : InfoException
+ # The playlist doesn't exist.
+ items = [] of PlaylistVideo
+ end
+ next_continuation = nil
+ else
+ sort_by = env.params.query["sort_by"]?.try &.downcase || "newest"
+ sort_options = {"newest", "oldest", "popular"}
+
+ # Fetch items and continuation token
+ items, next_continuation = Channel::Tabs.get_60_livestreams(
+ channel, continuation: continuation, sort_by: sort_by
+ )
+ end
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Streams
+ templated "channel"
+ end
+
+ def self.playlists(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ sort_options = {"last", "oldest", "newest"}
+ sort_by = env.params.query["sort_by"]?.try &.downcase
+
+ if channel.auto_generated
+ return env.redirect "/channel/#{channel.ucid}"
+ end
+
+ items, next_continuation = fetch_channel_playlists(
+ channel.ucid, channel.author, continuation, (sort_by || "last")
+ )
+
+ items = items.select(SearchPlaylist)
+ items.each(&.author = "")
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Playlists
+ templated "channel"
+ end
+
+ def self.podcasts(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ sort_by = ""
+ sort_options = [] of String
+
+ items, next_continuation = fetch_channel_podcasts(
+ channel.ucid, channel.author, continuation
+ )
+
+ items = items.select(SearchPlaylist)
+ items.each(&.author = "")
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Podcasts
+ templated "channel"
+ end
+
+ def self.releases(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ sort_by = ""
+ sort_options = [] of String
+
+ items, next_continuation = fetch_channel_releases(
+ channel.ucid, channel.author, continuation
+ )
+
+ items = items.select(SearchPlaylist)
+ items.each(&.author = "")
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Releases
+ templated "channel"
+ end
+
+ def self.community(env)
+ data = self.fetch_basic_information(env)
+ if !data.is_a?(Tuple)
+ return data
+ end
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ # Redirect to the post page
+ if lb = env.params.query["lb"]?
+ env.redirect "/post/#{URI.encode_www_form(lb)}?ucid=#{URI.encode_www_form(ucid)}"
+ end
+
+ thin_mode = env.params.query["thin_mode"]? || env.get("preferences").as(Preferences).thin_mode
+ thin_mode = thin_mode == "true"
+
+ continuation = env.params.query["continuation"]?
+
+ if !channel.tabs.includes? "community"
+ return env.redirect "/channel/#{channel.ucid}"
+ end
+
+ # TODO: support sort options for community posts
+ sort_by = ""
+ sort_options = [] of String
+
+ begin
+ items = JSON.parse(fetch_channel_community(ucid, continuation, locale, "json", thin_mode))
+ rescue ex : InfoException
+ env.response.status_code = 500
+ error_message = ex.message
+ rescue ex : NotFoundException
+ env.response.status_code = 404
+ error_message = ex.message
+ rescue ex
+ return error_template(500, ex)
+ end
+
+ templated "community"
+ end
+
+ def self.post(env)
+ # /post/{postId}
+ id = env.params.url["id"]
+ ucid = env.params.query["ucid"]?
+
+ prefs = env.get("preferences").as(Preferences)
+
+ locale = prefs.locale
+
+ thin_mode = env.params.query["thin_mode"]? || prefs.thin_mode
+ thin_mode = thin_mode == "true"
+
+ nojs = env.params.query["nojs"]?
+
+ nojs ||= "0"
+ nojs = nojs == "1"
+
+ if !ucid.nil?
+ ucid = ucid.to_s
+ post_response = fetch_channel_community_post(ucid, id, locale, "json", thin_mode)
+ else
+ # Resolve the URL to get the author's UCID
+ response = YoutubeAPI.resolve_url("https://www.youtube.com/post/#{id}")
+ return error_template(400, "Invalid post ID") if response["error"]?
+
+ ucid = response.dig("endpoint", "browseEndpoint", "browseId").as_s
+ post_response = fetch_channel_community_post(ucid, id, locale, "json", thin_mode)
+ end
+
+ post_response = JSON.parse(post_response)
+
+ if nojs
+ comments = Comments.fetch_community_post_comments(ucid, id)
+ comment_html = JSON.parse(Comments.parse_youtube(id, comments, "html", locale, thin_mode, is_post: true))["contentHtml"]
+ end
+ templated "post"
+ end
+
+ def self.channels(env)
+ data = self.fetch_basic_information(env)
+ return data if !data.is_a?(Tuple)
+
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ if channel.auto_generated
+ return env.redirect "/channel/#{channel.ucid}"
+ end
+
+ items, next_continuation = fetch_related_channels(channel, continuation)
+
+ # Featured/related channels can't be sorted
+ sort_options = [] of String
+ sort_by = nil
+
+ selected_tab = Frontend::ChannelPage::TabsAvailable::Channels
+ templated "channel"
+ end
+
+ def self.about(env)
+ data = self.fetch_basic_information(env)
+ if !data.is_a?(Tuple)
+ return data
+ end
+ locale, user, subscriptions, continuation, ucid, channel = data
+
+ env.redirect "/channel/#{ucid}"
+ end
+
+ private KNOWN_TABS = {
+ "home", "videos", "shorts", "streams", "podcasts",
+ "releases", "playlists", "community", "channels", "about",
+ }
+
+ # Redirects brand url channels to a normal /channel/:ucid route
+ def self.brand_redirect(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ # The /attribution_link endpoint needs both the `a` and `u` parameters.
+ # To avoid detection by YouTube, we only send the required ones,
+ # without any of the additional URL parameters that only Invidious uses.
+ yt_url_params = URI::Params.encode(env.params.query.to_h.select(["a", "u", "user"]))
+
+ # Retrieves URL params that only Invidious uses
+ invidious_url_params = env.params.query.dup
+ invidious_url_params.delete_all("a")
+ invidious_url_params.delete_all("u")
+ invidious_url_params.delete_all("user")
+
+ begin
+ resolved_url = YoutubeAPI.resolve_url("https://youtube.com#{env.request.path}#{yt_url_params.size > 0 ? "?#{yt_url_params}" : ""}")
+ ucid = resolved_url["endpoint"]["browseEndpoint"]["browseId"]
+ rescue ex : InfoException | KeyError
+ return error_template(404, translate(locale, "This channel does not exist."))
+ end
+
+ selected_tab = env.params.url["tab"]?
+
+ if KNOWN_TABS.includes? selected_tab
+ url = "/channel/#{ucid}/#{selected_tab}"
+ else
+ url = "/channel/#{ucid}"
+ end
+
+ url += "?#{invidious_url_params}" if !invidious_url_params.empty?
+
+ return env.redirect url
+ end
+
+ # Handles redirects for the /profile endpoint
+ def self.profile(env)
+ # The /profile endpoint is special: if passed to the resolve_url
+ # endpoint, YouTube returns a sign-in page instead of a /channel/:ucid,
+ # so we add an edge case and handle it here.
+
+ uri_params = env.params.query.size > 0 ? "?#{env.params.query}" : ""
+
+ user = env.params.query["user"]?
+ if !user
+ return error_template(404, "This channel does not exist.")
+ else
+ env.redirect "/user/#{user}#{uri_params}"
+ end
+ end
+
+ def self.live(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ # This appears to be a routing bug: having several routes configured
+ # as `/a/:a`, `/b/:a`, `/c/:a` results in a 404
+ value = env.request.resource.split("/")[2]
+ body = ""
+ {"channel", "user", "c"}.each do |type|
+ response = YT_POOL.client &.get("/#{type}/#{value}/live?disable_polymer=1")
+ if response.status_code == 200
+ body = response.body
+ end
+ end
+
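+ # Extract the livestream's video ID from the returned page body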
+ video_id = body.match(/'VIDEO_ID': "(?<id>[a-zA-Z0-9_-]{11})"/).try &.["id"]?
+ if video_id
+ params = [] of String
+ env.params.query.each do |k, v|
+ params << "#{k}=#{v}"
+ end
+ params = params.join("&")
+
+ url = "/watch?v=#{video_id}"
+ if !params.empty?
+ url += "&#{params}"
+ end
+
+ env.redirect url
+ else
+ env.redirect "/channel/#{value}"
+ end
+ end
+
+ private def self.fetch_basic_information(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ if user
+ user = user.as(User)
+ subscriptions = user.subscriptions
+ end
+ subscriptions ||= [] of String
+
+ ucid = env.params.url["ucid"]
+ continuation = env.params.query["continuation"]?
+
+ begin
+ channel = get_about_info(ucid, locale)
+ rescue ex : ChannelRedirect
+ return env.redirect env.request.resource.gsub(ucid, ex.channel_id)
+ rescue ex : NotFoundException
+ return error_template(404, ex)
+ rescue ex
+ return error_template(500, ex)
+ end
+
+ env.set "search", "channel:#{ucid} "
+ return {locale, user, subscriptions, continuation, ucid, channel}
+ end
+end
diff --git a/src/invidious/routes/embed.cr b/src/invidious/routes/embed.cr
index 5db32788..266f7ba4 100644
--- a/src/invidious/routes/embed.cr
+++ b/src/invidious/routes/embed.cr
@@ -1,12 +1,19 @@
-class Invidious::Routes::Embed < Invidious::Routes::BaseRoute
- def redirect(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+{% skip_file if flag?(:api_only) %}
+module Invidious::Routes::Embed
+ def self.redirect(env)
+ locale = env.get("preferences").as(Preferences).locale
if plid = env.params.query["list"]?.try &.gsub(/[^a-zA-Z0-9_-]/, "")
begin
- playlist = get_playlist(PG_DB, plid, locale: locale)
+ playlist = get_playlist(plid)
offset = env.params.query["index"]?.try &.to_i? || 0
- videos = get_playlist_videos(PG_DB, playlist, offset: offset, locale: locale)
+ videos = get_playlist_videos(playlist, offset: offset)
+ if videos.empty?
+ url = "/playlist?list=#{plid}"
+ raise NotFoundException.new(translate(locale, "error_video_not_in_playlist", url))
+ end
+ rescue ex : NotFoundException
+ return error_template(404, ex)
rescue ex
return error_template(500, ex)
end
@@ -23,12 +30,12 @@ class Invidious::Routes::Embed < Invidious::Routes::BaseRoute
env.redirect url
end
- def show(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.show(env)
+ locale = env.get("preferences").as(Preferences).locale
id = env.params.url["id"]
plid = env.params.query["list"]?.try &.gsub(/[^a-zA-Z0-9_-]/, "")
- continuation = process_continuation(PG_DB, env.params.query, plid, id)
+ continuation = process_continuation(env.params.query, plid, id)
if md = env.params.query["playlist"]?
.try &.match(/[a-zA-Z0-9_-]{11}(,[a-zA-Z0-9_-]{11})*/)
@@ -58,9 +65,15 @@ class Invidious::Routes::Embed < Invidious::Routes::BaseRoute
if plid
begin
- playlist = get_playlist(PG_DB, plid, locale: locale)
+ playlist = get_playlist(plid)
offset = env.params.query["index"]?.try &.to_i? || 0
- videos = get_playlist_videos(PG_DB, playlist, offset: offset, locale: locale)
+ videos = get_playlist_videos(playlist, offset: offset)
+ if videos.empty?
+ url = "/playlist?list=#{plid}"
+ raise NotFoundException.new(translate(locale, "error_video_not_in_playlist", url))
+ end
+ rescue ex : NotFoundException
+ return error_template(404, ex)
rescue ex
return error_template(500, ex)
end
@@ -117,9 +130,9 @@ class Invidious::Routes::Embed < Invidious::Routes::BaseRoute
subscriptions ||= [] of String
begin
- video = get_video(id, PG_DB, region: params.region)
- rescue ex : VideoRedirect
- return env.redirect env.request.resource.gsub(id, ex.video_id)
+ video = get_video(id, region: params.region)
+ rescue ex : NotFoundException
+ return error_template(404, ex)
rescue ex
return error_template(500, ex)
end
@@ -134,8 +147,8 @@ class Invidious::Routes::Embed < Invidious::Routes::BaseRoute
# PG_DB.exec("UPDATE users SET watched = array_append(watched, $1) WHERE email = $2", id, user.as(User).email)
# end
- if notifications && notifications.includes? id
- PG_DB.exec("UPDATE users SET notifications = array_remove(notifications, $1) WHERE email = $2", id, user.as(User).email)
+ if CONFIG.enable_user_notifications && notifications && notifications.includes? id
+ Invidious::Database::Users.remove_notification(user.as(User), id)
env.get("user").as(User).notifications.delete(id)
notifications.delete(id)
end
@@ -165,12 +178,12 @@ class Invidious::Routes::Embed < Invidious::Routes::BaseRoute
captions = video.captions
preferred_captions = captions.select { |caption|
- params.preferred_captions.includes?(caption.name.simpleText) ||
- params.preferred_captions.includes?(caption.languageCode.split("-")[0])
+ params.preferred_captions.includes?(caption.name) ||
+ params.preferred_captions.includes?(caption.language_code.split("-")[0])
}
preferred_captions.sort_by! { |caption|
- (params.preferred_captions.index(caption.name.simpleText) ||
- params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
+ (params.preferred_captions.index(caption.name) ||
+ params.preferred_captions.index(caption.language_code.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
diff --git a/src/invidious/routes/errors.cr b/src/invidious/routes/errors.cr
new file mode 100644
index 00000000..1e9ab44e
--- /dev/null
+++ b/src/invidious/routes/errors.cr
@@ -0,0 +1,52 @@
+module Invidious::Routes::ErrorRoutes
+ def self.error_404(env)
+ # Workaround for #3117
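+ # e.g. "/v1/storyboards/sb/<...>" is redirected to "/sb/<...>"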
+ if HOST_URL.empty? && env.request.path.starts_with?("/v1/storyboards/sb")
+ return env.redirect "#{env.request.path[15..]}?#{env.params.query}"
+ end
+
+ if md = env.request.path.match(/^\/(?<id>([a-zA-Z0-9_-]{11})|(\w+))$/)
+ item = md["id"]
+
+ # Check whether the item is a branding URL, e.g. https://youtube.com/gaming
+ response = YT_POOL.client &.get("/#{item}")
+
+ if response.status_code == 301
+ response = YT_POOL.client &.get(URI.parse(response.headers["Location"]).request_target)
+ end
+
+ if response.body.empty?
+ env.response.headers["Location"] = "/"
+ haltf env, status_code: 302
+ end
+
+ html = XML.parse_html(response.body)
+ ucid = html.xpath_node(%q(//link[@rel="canonical"])).try &.["href"].split("/")[-1]
+
+ if ucid
+ env.response.headers["Location"] = "/channel/#{ucid}"
+ haltf env, status_code: 302
+ end
+
+ params = [] of String
+ env.params.query.each do |k, v|
+ params << "#{k}=#{v}"
+ end
+ params = params.join("&")
+
+ url = "/watch?v=#{item}"
+ if !params.empty?
+ url += "&#{params}"
+ end
+
+ # Check whether the item is a video ID
+ if item.match(/^[a-zA-Z0-9_-]{11}$/) && YT_POOL.client &.head("/watch?v=#{item}").status_code != 404
+ env.response.headers["Location"] = url
+ haltf env, status_code: 302
+ end
+ end
+
+ env.response.headers["Location"] = "/"
+ haltf env, status_code: 302
+ end
+end
diff --git a/src/invidious/routes/feeds.cr b/src/invidious/routes/feeds.cr
new file mode 100644
index 00000000..ea7fb396
--- /dev/null
+++ b/src/invidious/routes/feeds.cr
@@ -0,0 +1,462 @@
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Feeds
+ def self.view_all_playlists_redirect(env)
+ env.redirect "/feed/playlists"
+ end
+
+ def self.playlists(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ referer = get_referer(env)
+
+ return env.redirect "/" if user.nil?
+
+ user = user.as(User)
+
+ # TODO: make a single DB call and separate the items here?
+ items_created = Invidious::Database::Playlists.select_like_iv(user.email)
+ items_created.map! do |item|
+ item.author = ""
+ item
+ end
+
+ items_saved = Invidious::Database::Playlists.select_not_like_iv(user.email)
+ items_saved.map! do |item|
+ item.author = ""
+ item
+ end
+
+ templated "feeds/playlists"
+ end
+
+ def self.popular(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ if CONFIG.popular_enabled
+ templated "feeds/popular"
+ else
+ message = translate(locale, "The Popular feed has been disabled by the administrator.")
+ templated "message"
+ end
+ end
+
+ def self.trending(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ trending_type = env.params.query["type"]?
+ trending_type ||= "Default"
+
+ region = env.params.query["region"]?
+ region ||= env.get("preferences").as(Preferences).region
+
+ begin
+ trending, plid = fetch_trending(trending_type, region, locale)
+ rescue ex
+ return error_template(500, ex)
+ end
+
+ templated "feeds/trending"
+ end
+
+ def self.subscriptions(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = user.token
+
+ if user.preferences.unseen_only
+ env.set "show_watched", true
+ end
+
+ # Refresh account
+ headers = HTTP::Headers.new
+ headers["Cookie"] = env.request.headers["Cookie"]
+
+ max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
+ max_results ||= user.preferences.max_results
+ max_results ||= CONFIG.default_user_preferences.max_results
+
+ page = env.params.query["page"]?.try &.to_i?
+ page ||= 1
+
+ videos, notifications = get_subscription_feed(user, max_results, page)
+
+ if CONFIG.enable_user_notifications
+ # "updated" here is used for delivering new notifications, so if
+ # we know a user has looked at their feed e.g. in the past 10 minutes,
+ # they've already seen a video posted 20 minutes ago, and don't need
+ # to be notified.
+ Invidious::Database::Users.clear_notifications(user)
+ user.notifications = [] of String
+ end
+ env.set "user", user
+
+ # Used for pagination links
+ base_url = "/feed/subscriptions"
+ base_url += "?max_results=#{max_results}" if env.params.query.has_key?("max_results")
+
+ templated "feeds/subscriptions"
+ end
+
+ def self.history(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ referer = get_referer(env)
+
+ page = env.params.query["page"]?.try &.to_i?
+ page ||= 1
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+
+ max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
+ max_results ||= user.preferences.max_results
+ max_results ||= CONFIG.default_user_preferences.max_results
+
+ if user.watched[(page - 1) * max_results]?
+ watched = user.watched.reverse[(page - 1) * max_results, max_results]
+ end
+ watched ||= [] of String
+
+ # Used for pagination links
+ base_url = "/feed/history"
+ base_url += "?max_results=#{max_results}" if env.params.query.has_key?("max_results")
+
+ templated "feeds/history"
+ end
+
+ # RSS feeds
+
+ def self.rss_channel(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.headers["Content-Type"] = "application/atom+xml"
+ env.response.content_type = "application/atom+xml"
+
+ ucid = env.params.url["ucid"]
+
+ params = HTTP::Params.parse(env.params.query["params"]? || "")
+
+ begin
+ channel = get_about_info(ucid, locale)
+ rescue ex : ChannelRedirect
+ return env.redirect env.request.resource.gsub(ucid, ex.channel_id)
+ rescue ex : NotFoundException
+ return error_atom(404, ex)
+ rescue ex
+ return error_atom(500, ex)
+ end
+
+ namespaces = {
+ "yt" => "http://www.youtube.com/xml/schemas/2015",
+ "media" => "http://search.yahoo.com/mrss/",
+ "default" => "http://www.w3.org/2005/Atom",
+ }
+
+ response = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{channel.ucid}")
+ rss = XML.parse(response.body)
+
+ videos = rss.xpath_nodes("//default:feed/default:entry", namespaces).map do |entry|
+ video_id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content
+ title = entry.xpath_node("default:title", namespaces).not_nil!.content
+
+ published = Time.parse_rfc3339(entry.xpath_node("default:published", namespaces).not_nil!.content)
+ updated = Time.parse_rfc3339(entry.xpath_node("default:updated", namespaces).not_nil!.content)
+
+ author = entry.xpath_node("default:author/default:name", namespaces).not_nil!.content
+ ucid = entry.xpath_node("yt:channelId", namespaces).not_nil!.content
+ description_html = entry.xpath_node("media:group/media:description", namespaces).not_nil!.to_s
+ views = entry.xpath_node("media:group/media:community/media:statistics", namespaces).not_nil!.["views"].to_i64
+
+ SearchVideo.new({
+ title: title,
+ id: video_id,
+ author: author,
+ ucid: ucid,
+ published: published,
+ views: views,
+ description_html: description_html,
+ length_seconds: 0,
+ premiere_timestamp: nil,
+ author_verified: false,
+ badges: VideoBadges::None,
+ })
+ end
+
+ XML.build(indent: " ", encoding: "UTF-8") do |xml|
+ xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
+ "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
+ "xml:lang": "en-US") do
+ xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
+ xml.element("id") { xml.text "yt:channel:#{channel.ucid}" }
+ xml.element("yt:channelId") { xml.text channel.ucid }
+ xml.element("icon") { xml.text channel.author_thumbnail }
+ xml.element("title") { xml.text channel.author }
+ xml.element("link", rel: "alternate", href: "#{HOST_URL}/channel/#{channel.ucid}")
+
+ xml.element("author") do
+ xml.element("name") { xml.text channel.author }
+ xml.element("uri") { xml.text "#{HOST_URL}/channel/#{channel.ucid}" }
+ end
+
+ xml.element("image") do
+ xml.element("url") { xml.text channel.author_thumbnail }
+ xml.element("title") { xml.text channel.author }
+ xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
+ end
+
+ videos.each do |video|
+ video.to_xml(channel.auto_generated, params, xml)
+ end
+ end
+ end
+ end
+
+ def self.rss_private(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.headers["Content-Type"] = "application/atom+xml"
+ env.response.content_type = "application/atom+xml"
+
+ token = env.params.query["token"]?
+
+ if !token
+ haltf env, status_code: 403
+ end
+
+ user = Invidious::Database::Users.select(token: token.strip)
+ if !user
+ haltf env, status_code: 403
+ end
+
+ max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
+ max_results ||= user.preferences.max_results
+ max_results ||= CONFIG.default_user_preferences.max_results
+
+ page = env.params.query["page"]?.try &.to_i?
+ page ||= 1
+
+ params = HTTP::Params.parse(env.params.query["params"]? || "")
+
+ videos, notifications = get_subscription_feed(user, max_results, page)
+
+ XML.build(indent: " ", encoding: "UTF-8") do |xml|
+ xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
+ "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
+ "xml:lang": "en-US") do
+ xml.element("link", "type": "text/html", rel: "alternate", href: "#{HOST_URL}/feed/subscriptions")
+ xml.element("link", "type": "application/atom+xml", rel: "self",
+ href: "#{HOST_URL}#{env.request.resource}")
+ xml.element("title") { xml.text translate(locale, "Invidious Private Feed for `x`", user.email) }
+
+ (notifications + videos).each do |video|
+ video.to_xml(locale, params, xml)
+ end
+ end
+ end
+ end
+
+ def self.rss_playlist(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ env.response.headers["Content-Type"] = "application/atom+xml"
+ env.response.content_type = "application/atom+xml"
+
+ plid = env.params.url["plid"]
+
+ params = HTTP::Params.parse(env.params.query["params"]? || "")
+ path = env.request.path
+
+ if plid.starts_with? "IV"
+ if playlist = Invidious::Database::Playlists.select(id: plid)
+ videos = get_playlist_videos(playlist, offset: 0)
+
+ return XML.build(indent: " ", encoding: "UTF-8") do |xml|
+ xml.element("feed", "xmlns:yt": "http://www.youtube.com/xml/schemas/2015",
+ "xmlns:media": "http://search.yahoo.com/mrss/", xmlns: "http://www.w3.org/2005/Atom",
+ "xml:lang": "en-US") do
+ xml.element("link", rel: "self", href: "#{HOST_URL}#{env.request.resource}")
+ xml.element("id") { xml.text "iv:playlist:#{plid}" }
+ xml.element("iv:playlistId") { xml.text plid }
+ xml.element("title") { xml.text playlist.title }
+ xml.element("link", rel: "alternate", href: "#{HOST_URL}/playlist?list=#{plid}")
+
+ xml.element("author") do
+ xml.element("name") { xml.text playlist.author }
+ end
+
+ videos.each &.to_xml(xml)
+ end
+ end
+ else
+ haltf env, status_code: 404
+ end
+ end
+
+ response = YT_POOL.client &.get("/feeds/videos.xml?playlist_id=#{plid}")
+ document = XML.parse(response.body)
+
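+ # Rewrite every href/url attribute in the feed so that links point to this
+ # Invidious instance instead of youtube.com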
+ document.xpath_nodes(%q(//*[@href]|//*[@url])).each do |node|
+ node.attributes.each do |attribute|
+ case attribute.name
+ when "url", "href"
+ request_target = URI.parse(node[attribute.name]).request_target
+ query_string_opt = request_target.starts_with?("/watch?v=") ? "&#{params}" : ""
+ node[attribute.name] = "#{HOST_URL}#{request_target}#{query_string_opt}"
+ else nil # Skip
+ end
+ end
+ end
+
+ document = document.to_xml(options: XML::SaveOptions::NO_DECL)
+
+ document.scan(/<uri>(?<url>[^<]+)<\/uri>/).each do |match|
+ content = "#{HOST_URL}#{URI.parse(match["url"]).request_target}"
+ document = document.gsub(match[0], "<uri>#{content}</uri>")
+ end
+ document
+ end
+
+ def self.rss_videos(env)
+ if ucid = env.params.query["channel_id"]?
+ env.redirect "/feed/channel/#{ucid}"
+ elsif user = env.params.query["user"]?
+ env.redirect "/feed/channel/#{user}"
+ elsif plid = env.params.query["playlist_id"]?
+ env.redirect "/feed/playlist/#{plid}"
+ end
+ end
+
+ # Push notifications via PubSub
+
+ def self.push_notifications_get(env)
+ verify_token = env.params.url["token"]
+
+ mode = env.params.query["hub.mode"]?
+ topic = env.params.query["hub.topic"]?
+ challenge = env.params.query["hub.challenge"]?
+
+ if !mode || !topic || !challenge
+ haltf env, status_code: 400
+ else
+ mode = mode.not_nil!
+ topic = topic.not_nil!
+ challenge = challenge.not_nil!
+ end
+
+ case verify_token
+ when .starts_with? "v1"
+ _, time, nonce, signature = verify_token.split(":")
+ data = "#{time}:#{nonce}"
+ when .starts_with? "v2"
+ time, signature = verify_token.split(":")
+ data = "#{time}"
+ else
+ haltf env, status_code: 400
+ end
+
+ # The hub will sometimes check if we're still subscribed after delivery errors,
+ # so we reply with a 200 as long as the request hasn't expired
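+ # (432000 seconds = 5 days)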
+ if Time.utc.to_unix - time.to_i > 432000
+ haltf env, status_code: 400
+ end
+
+ if OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, data) != signature
+ haltf env, status_code: 400
+ end
+
+ if ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]?
+ Invidious::Database::Channels.update_subscription_time(ucid)
+ elsif plid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["playlist_id"]?
+ Invidious::Database::Playlists.update_subscription_time(plid)
+ else
+ haltf env, status_code: 400
+ end
+
+ env.response.status_code = 200
+ challenge
+ end
+
+ def self.push_notifications_post(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ token = env.params.url["token"]
+ body = env.request.body.not_nil!.gets_to_end
+ signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
+
+ if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
+ LOGGER.error("/feed/webhook/#{token} : Invalid signature")
+ haltf env, status_code: 200
+ end
+
+ spawn do
+ # TODO: unify this with the almost identical code elsewhere in this file and in channels.cr
+ namespaces = {
+ "yt" => "http://www.youtube.com/xml/schemas/2015",
+ "default" => "http://www.w3.org/2005/Atom",
+ }
+ rss = XML.parse(body)
+ rss.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry|
+ id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content
+ author = entry.xpath_node("default:author/default:name", namespaces).not_nil!.content
+ published = Time.parse_rfc3339(entry.xpath_node("default:published", namespaces).not_nil!.content)
+ updated = Time.parse_rfc3339(entry.xpath_node("default:updated", namespaces).not_nil!.content)
+
+ begin
+ video = get_video(id, force_refresh: true)
+ rescue
+ next # skip this video since it raised an exception (e.g. it is a scheduled live event)
+ end
+
+ if CONFIG.enable_user_notifications
+ # Deliver notifications to `/api/v1/auth/notifications`
+ payload = {
+ "topic" => video.ucid,
+ "videoId" => video.id,
+ "published" => published.to_unix,
+ }.to_json
+ PG_DB.exec("NOTIFY notifications, E'#{payload}'")
+ end
+
+ video = ChannelVideo.new({
+ id: id,
+ title: video.title,
+ published: published,
+ updated: updated,
+ ucid: video.ucid,
+ author: author,
+ length_seconds: video.length_seconds,
+ live_now: video.live_now,
+ premiere_timestamp: video.premiere_timestamp,
+ views: video.views,
+ })
+
+ was_insert = Invidious::Database::ChannelVideos.insert(video, with_premiere_timestamp: true)
+ if was_insert
+ if CONFIG.enable_user_notifications
+ Invidious::Database::Users.add_notification(video)
+ else
+ Invidious::Database::Users.feed_needs_update(video)
+ end
+ end
+ end
+ end
+
+ env.response.status_code = 200
+ end
+end
diff --git a/src/invidious/routes/images.cr b/src/invidious/routes/images.cr
new file mode 100644
index 00000000..639697db
--- /dev/null
+++ b/src/invidious/routes/images.cr
@@ -0,0 +1,153 @@
+module Invidious::Routes::Images
+ # Avatars, banners and other large image assets.
+ def self.ggpht(env)
+ url = env.request.path.lchop("/ggpht")
+
+ headers = HTTP::Headers.new
+
+ REQUEST_HEADERS_WHITELIST.each do |header|
+ if env.request.headers[header]?
+ headers[header] = env.request.headers[header]
+ end
+ end
+
+ begin
+ GGPHT_POOL.client &.get(url, headers) do |resp|
+ return self.proxy_image(env, resp)
+ end
+ rescue ex
+ end
+ end
+
+ def self.options_storyboard(env)
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+ env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
+ env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
+ end
+
+ def self.get_storyboard(env)
+ authority = env.params.url["authority"]
+ id = env.params.url["id"]
+ storyboard = env.params.url["storyboard"]
+ index = env.params.url["index"]
+
+ url = "/sb/#{id}/#{storyboard}/#{index}?#{env.params.query}"
+
+ headers = HTTP::Headers.new
+
+ REQUEST_HEADERS_WHITELIST.each do |header|
+ if env.request.headers[header]?
+ headers[header] = env.request.headers[header]
+ end
+ end
+
+ begin
+ get_ytimg_pool(authority).client &.get(url, headers) do |resp|
+ env.response.headers["Connection"] = "close"
+ return self.proxy_image(env, resp)
+ end
+ rescue ex
+ end
+ end
+
+ # ??? maybe also for storyboards?
+ def self.s_p_image(env)
+ id = env.params.url["id"]
+ name = env.params.url["name"]
+ url = env.request.resource
+
+ headers = HTTP::Headers.new
+
+ REQUEST_HEADERS_WHITELIST.each do |header|
+ if env.request.headers[header]?
+ headers[header] = env.request.headers[header]
+ end
+ end
+
+ begin
+ get_ytimg_pool("i9").client &.get(url, headers) do |resp|
+ return self.proxy_image(env, resp)
+ end
+ rescue ex
+ end
+ end
+
+ def self.yts_image(env)
+ headers = HTTP::Headers.new
+ REQUEST_HEADERS_WHITELIST.each do |header|
+ if env.request.headers[header]?
+ headers[header] = env.request.headers[header]
+ end
+ end
+
+ begin
+ YT_POOL.client &.get(env.request.resource, headers) do |response|
+ env.response.status_code = response.status_code
+ response.headers.each do |key, value|
+ if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
+ env.response.headers[key] = value
+ end
+ end
+
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if response.status_code >= 300 && response.status_code != 404
+ env.response.headers.delete("Transfer-Encoding")
+ break
+ end
+
+ proxy_file(response, env)
+ end
+ rescue ex
+ end
+ end
+
+ def self.thumbnails(env)
+ id = env.params.url["id"]
+ name = env.params.url["name"]
+
+ headers = HTTP::Headers.new
+
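+ # "maxres.jpg" isn't always available, so probe the known thumbnail
+ # variants and use the first one that exists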
+ if name == "maxres.jpg"
+ build_thumbnails(id).each do |thumb|
+ thumbnail_resource_path = "/vi/#{id}/#{thumb[:url]}.jpg"
+ if get_ytimg_pool("i9").client &.head(thumbnail_resource_path, headers).status_code == 200
+ name = thumb[:url] + ".jpg"
+ break
+ end
+ end
+ end
+
+ url = "/vi/#{id}/#{name}"
+
+ REQUEST_HEADERS_WHITELIST.each do |header|
+ if env.request.headers[header]?
+ headers[header] = env.request.headers[header]
+ end
+ end
+
+ begin
+ get_ytimg_pool("i").client &.get(url, headers) do |resp|
+ return self.proxy_image(env, resp)
+ end
+ rescue ex
+ end
+ end
+
+ private def self.proxy_image(env, response)
+ env.response.status_code = response.status_code
+ response.headers.each do |key, value|
+ if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
+ env.response.headers[key] = value
+ end
+ end
+
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if response.status_code >= 300
+ return env.response.headers.delete("Transfer-Encoding")
+ end
+
+ return proxy_file(response, env)
+ end
+end
diff --git a/src/invidious/routes/login.cr b/src/invidious/routes/login.cr
index ffe5f568..d0f7ac22 100644
--- a/src/invidious/routes/login.cr
+++ b/src/invidious/routes/login.cr
@@ -1,17 +1,19 @@
-class Invidious::Routes::Login < Invidious::Routes::BaseRoute
- def login_page(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Login
+ def self.login_page(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
- return env.redirect "/feed/subscriptions" if user
+ referer = get_referer(env, "/feed/subscriptions")
+
+ return env.redirect referer if user
if !CONFIG.login_enabled
return error_template(400, "Login has been disabled by administrator.")
end
- referer = get_referer(env, "/feed/subscriptions")
-
email = nil
password = nil
captcha = nil
@@ -22,14 +24,11 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
captcha_type = env.params.query["captcha"]?
captcha_type ||= "image"
- tfa = env.params.query["tfa"]?
- prompt = nil
-
- templated "login"
+ templated "user/login"
end
- def login(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.login(env)
+ locale = env.get("preferences").as(Preferences).locale
referer = get_referer(env, "/feed/subscriptions")
@@ -45,304 +44,23 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
account_type ||= "invidious"
case account_type
- when "google"
- tfa_code = env.params.body["tfa"]?.try &.lchop("G-")
- traceback = IO::Memory.new
-
- # See https://github.com/ytdl-org/youtube-dl/blob/2019.04.07/youtube_dl/extractor/youtube.py#L82
- begin
- client = QUIC::Client.new(LOGIN_URL)
- headers = HTTP::Headers.new
-
- login_page = client.get("/ServiceLogin")
- headers = login_page.cookies.add_request_headers(headers)
-
- lookup_req = {
- email, nil, [] of String, nil, "US", nil, nil, 2, false, true,
- {nil, nil,
- {2, 1, nil, 1,
- "https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn",
- nil, [] of String, 4},
- 1,
- {nil, nil, [] of String},
- nil, nil, nil, true,
- },
- email,
- }.to_json
-
- traceback << "Getting lookup..."
-
- headers["Content-Type"] = "application/x-www-form-urlencoded;charset=utf-8"
- headers["Google-Accounts-XSRF"] = "1"
-
- response = client.post("/_/signin/sl/lookup", headers, login_req(lookup_req))
- lookup_results = JSON.parse(response.body[5..-1])
-
- traceback << "done, returned #{response.status_code}.<br/>"
-
- user_hash = lookup_results[0][2]
-
- if token = env.params.body["token"]?
- answer = env.params.body["answer"]?
- captcha = {token, answer}
- else
- captcha = nil
- end
-
- challenge_req = {
- user_hash, nil, 1, nil,
- {1, nil, nil, nil,
- {password, captcha, true},
- },
- {nil, nil,
- {2, 1, nil, 1,
- "https://accounts.google.com/ServiceLogin?passive=true&continue=https%3A%2F%2Fwww.youtube.com%2Fsignin%3Fnext%3D%252F%26action_handle_signin%3Dtrue%26hl%3Den%26app%3Ddesktop%26feature%3Dsign_in_button&hl=en&service=youtube&uilel=3&requestPath=%2FServiceLogin&Page=PasswordSeparationSignIn",
- nil, [] of String, 4},
- 1,
- {nil, nil, [] of String},
- nil, nil, nil, true,
- },
- }.to_json
-
- traceback << "Getting challenge..."
-
- response = client.post("/_/signin/sl/challenge", headers, login_req(challenge_req))
- headers = response.cookies.add_request_headers(headers)
- challenge_results = JSON.parse(response.body[5..-1])
-
- traceback << "done, returned #{response.status_code}.<br/>"
-
- headers["Cookie"] = URI.decode_www_form(headers["Cookie"])
-
- if challenge_results[0][3]?.try &.== 7
- return error_template(423, "Account has temporarily been disabled")
- end
-
- if token = challenge_results[0][-1]?.try &.[-1]?.try &.as_h?.try &.["5001"]?.try &.[-1].as_a?.try &.[-1].as_s
- account_type = "google"
- captcha_type = "image"
- prompt = nil
- tfa = tfa_code
- captcha = {tokens: [token], question: ""}
-
- return templated "login"
- end
-
- if challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED"
- return error_template(401, "Incorrect password")
- end
-
- prompt_type = challenge_results[0][-1]?.try &.[0].as_a?.try &.[0][2]?
- if {"TWO_STEP_VERIFICATION", "LOGIN_CHALLENGE"}.includes? prompt_type
- traceback << "Handling prompt #{prompt_type}.<br/>"
- case prompt_type
- when "TWO_STEP_VERIFICATION"
- prompt_type = 2
- else # "LOGIN_CHALLENGE"
- prompt_type = 4
- end
-
- # Prefer Authenticator app and SMS over unsupported protocols
- if !{6, 9, 12, 15}.includes?(challenge_results[0][-1][0][0][8].as_i) && prompt_type == 2
- tfa = challenge_results[0][-1][0].as_a.select { |auth_type| {6, 9, 12, 15}.includes? auth_type[8] }[0]
-
- traceback << "Selecting challenge #{tfa[8]}..."
- select_challenge = {prompt_type, nil, nil, nil, {tfa[8]}}.to_json
-
- tl = challenge_results[1][2]
-
- tfa = client.post("/_/signin/selectchallenge?TL=#{tl}", headers, login_req(select_challenge)).body
- tfa = tfa[5..-1]
- tfa = JSON.parse(tfa)[0][-1]
-
- traceback << "done.<br/>"
- else
- traceback << "Using challenge #{challenge_results[0][-1][0][0][8]}.<br/>"
- tfa = challenge_results[0][-1][0][0]
- end
-
- if tfa[5] == "QUOTA_EXCEEDED"
- return error_template(423, "Quota exceeded, try again in a few hours")
- end
-
- if !tfa_code
- account_type = "google"
- captcha_type = "image"
-
- case tfa[8]
- when 6, 9
- prompt = "Google verification code"
- when 12
- prompt = "Login verification, recovery email: #{tfa[-1][tfa[-1].as_h.keys[0]][0]}"
- when 15
- prompt = "Login verification, security question: #{tfa[-1][tfa[-1].as_h.keys[0]][0]}"
- else
- prompt = "Google verification code"
- end
-
- tfa = nil
- captcha = nil
- return templated "login"
- end
-
- tl = challenge_results[1][2]
-
- request_type = tfa[8]
- case request_type
- when 6 # Authenticator app
- tfa_req = {
- user_hash, nil, 2, nil,
- {6, nil, nil, nil, nil,
- {tfa_code, false},
- },
- }.to_json
- when 9 # Voice or text message
- tfa_req = {
- user_hash, nil, 2, nil,
- {9, nil, nil, nil, nil, nil, nil, nil,
- {nil, tfa_code, false, 2},
- },
- }.to_json
- when 12 # Recovery email
- tfa_req = {
- user_hash, nil, 4, nil,
- {12, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
- {tfa_code},
- },
- }.to_json
- when 15 # Security question
- tfa_req = {
- user_hash, nil, 5, nil,
- {15, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
- {tfa_code},
- },
- }.to_json
- else
- return error_template(500, "Unable to log in, make sure two-factor authentication (Authenticator or SMS) is turned on.")
- end
-
- traceback << "Submitting challenge..."
-
- response = client.post("/_/signin/challenge?hl=en&TL=#{tl}", headers, login_req(tfa_req))
- headers = response.cookies.add_request_headers(headers)
- challenge_results = JSON.parse(response.body[5..-1])
-
- if (challenge_results[0][-1]?.try &.[5] == "INCORRECT_ANSWER_ENTERED") ||
- (challenge_results[0][-1]?.try &.[5] == "INVALID_INPUT")
- return error_template(401, "Invalid TFA code")
- end
-
- traceback << "done.<br/>"
- end
-
- traceback << "Logging in..."
-
- location = URI.parse(challenge_results[0][-1][2].to_s)
- cookies = HTTP::Cookies.from_headers(headers)
-
- headers.delete("Content-Type")
- headers.delete("Google-Accounts-XSRF")
-
- loop do
- if !location || location.path == "/ManageAccount"
- break
- end
-
- # Occasionally there will be a second page after login confirming
- # the user's phone number ("/b/0/SmsAuthInterstitial"), which we currently don't handle.
-
- if location.path.starts_with? "/b/0/SmsAuthInterstitial"
- traceback << "Unhandled dialog /b/0/SmsAuthInterstitial."
- end
-
- login = client.get(location.request_target, headers)
-
- headers = login.cookies.add_request_headers(headers)
- location = login.headers["Location"]?.try { |u| URI.parse(u) }
- end
-
- cookies = HTTP::Cookies.from_headers(headers)
- sid = cookies["SID"]?.try &.value
- if !sid
- raise "Couldn't get SID."
- end
-
- user, sid = get_user(sid, headers, PG_DB)
-
- # We are now logged in
- traceback << "done.<br/>"
-
- host = URI.parse(env.request.headers["Host"]).host
-
- if Kemal.config.ssl || CONFIG.https_only
- secure = true
- else
- secure = false
- end
-
- cookies.each do |cookie|
- if Kemal.config.ssl || CONFIG.https_only
- cookie.secure = secure
- else
- cookie.secure = secure
- end
-
- if cookie.extension
- cookie.extension = cookie.extension.not_nil!.gsub(".youtube.com", host)
- cookie.extension = cookie.extension.not_nil!.gsub("Secure; ", "")
- end
- env.response.cookies << cookie
- end
-
- if env.request.cookies["PREFS"]?
- preferences = env.get("preferences").as(Preferences)
- PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
-
- cookie = env.request.cookies["PREFS"]
- cookie.expires = Time.utc(1990, 1, 1)
- env.response.cookies << cookie
- end
-
- env.redirect referer
- rescue ex
- traceback.rewind
- # error_message = translate(locale, "Login failed. This may be because two-factor authentication is not turned on for your account.")
- error_message = %(#{ex.message}<br/>Traceback:<br/><div style="padding-left:2em" id="traceback">#{traceback.gets_to_end}</div>)
- return error_template(500, error_message)
- end
when "invidious"
- if !email
+ if email.nil? || email.empty?
return error_template(401, "User ID is a required field")
end
- if !password
+ if password.nil? || password.empty?
return error_template(401, "Password is a required field")
end
- user = PG_DB.query_one?("SELECT * FROM users WHERE email = $1", email, as: User)
+ user = Invidious::Database::Users.select(email: email)
if user
- if !user.password
- return error_template(400, "Please sign in using 'Log in with Google'")
- end
-
if Crypto::Bcrypt::Password.new(user.password.not_nil!).verify(password.byte_slice(0, 55))
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
- PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.utc)
-
- if Kemal.config.ssl || CONFIG.https_only
- secure = true
- else
- secure = false
- end
+ Invidious::Database::SessionIDs.insert(sid, email)
- if CONFIG.domain
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{CONFIG.domain}", value: sid, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- else
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- end
+ env.response.cookies["SID"] = Invidious::User::Cookies.sid(CONFIG.domain, sid)
else
return error_template(401, "Wrong username or password")
end
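
The hunk above swaps the hand-rolled SID cookie for a helper call. A minimal sketch of what such a helper could look like, using only the standard library and mirroring the attributes the removed inline code set (2-year expiry, HttpOnly, optional Domain) — the real Invidious::User::Cookies module may differ:

    require "http"

    # Hypothetical stand-in for the cookie helper referenced above; the
    # attribute choices copy the removed inline code, not the actual module.
    module CookieSketch
      def self.sid(domain : String?, sid : String, secure : Bool = true) : HTTP::Cookie
        HTTP::Cookie.new(
          name: "SID",
          domain: domain,          # nil simply omits the Domain attribute
          value: sid,
          expires: Time.utc + 2.years,
          secure: secure,
          http_only: true
        )
      end
    end

    # env.response.cookies["SID"] = CookieSketch.sid(CONFIG.domain, sid)
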
@@ -381,19 +99,17 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
captcha_type ||= "image"
account_type = "invidious"
- tfa = false
- prompt = ""
if captcha_type == "image"
- captcha = generate_captcha(HMAC_KEY, PG_DB)
+ captcha = Invidious::User::Captcha.generate_image(HMAC_KEY)
else
- captcha = generate_text_captcha(HMAC_KEY, PG_DB)
+ captcha = Invidious::User::Captcha.generate_text(HMAC_KEY)
end
- return templated "login"
+ return templated "user/login"
end
- tokens = env.params.body.select { |k, v| k.match(/^token\[\d+\]$/) }.map { |k, v| v }
+ tokens = env.params.body.select { |k, _| k.match(/^token\[\d+\]$/) }.map { |_, v| v }
answer ||= ""
captcha_type ||= "image"
@@ -404,7 +120,7 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
answer = OpenSSL::HMAC.hexdigest(:sha256, HMAC_KEY, answer)
begin
- validate_request(tokens[0], answer, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(tokens[0], answer, env.request, HMAC_KEY, locale)
rescue ex
return error_template(400, ex)
end
@@ -417,9 +133,9 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
found_valid_captcha = false
error_exception = Exception.new
- tokens.each_with_index do |token, i|
+ tokens.each do |tok|
begin
- validate_request(token, answer, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(tok, answer, env.request, HMAC_KEY, locale)
found_valid_captcha = true
rescue ex
error_exception = ex
@@ -434,34 +150,24 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
user, sid = create_user(sid, email, password)
- user_array = user.to_a
- user_array[4] = user_array[4].to_json # User preferences
- args = arg_array(user_array)
+ if language_header = env.request.headers["Accept-Language"]?
+ if language = ANG.language_negotiator.best(language_header, LOCALES.keys)
+ user.preferences.locale = language.header
+ end
+ end
- PG_DB.exec("INSERT INTO users VALUES (#{args})", args: user_array)
- PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.utc)
+ Invidious::Database::Users.insert(user)
+ Invidious::Database::SessionIDs.insert(sid, email)
view_name = "subscriptions_#{sha256(user.email)}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(user.email)}")
- if Kemal.config.ssl || CONFIG.https_only
- secure = true
- else
- secure = false
- end
-
- if CONFIG.domain
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{CONFIG.domain}", value: sid, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- else
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- end
+ env.response.cookies["SID"] = Invidious::User::Cookies.sid(CONFIG.domain, sid)
if env.request.cookies["PREFS"]?
- preferences = env.get("preferences").as(Preferences)
- PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences.to_json, user.email)
+ user.preferences = env.get("preferences").as(Preferences)
+ Invidious::Database::Users.update_preferences(user)
cookie = env.request.cookies["PREFS"]
cookie.expires = Time.utc(1990, 1, 1)
@@ -475,8 +181,8 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
end
end
- def signout(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.signout(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -491,12 +197,12 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
token = env.params.body["csrf_token"]?
begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
rescue ex
return error_template(400, ex)
end
- PG_DB.exec("DELETE FROM session_ids * WHERE id = $1", sid)
+ Invidious::Database::SessionIDs.delete(sid: sid)
env.request.cookies.each do |cookie|
cookie.expires = Time.utc(1990, 1, 1)
diff --git a/src/invidious/routes/misc.cr b/src/invidious/routes/misc.cr
index d32ba892..8b620d63 100644
--- a/src/invidious/routes/misc.cr
+++ b/src/invidious/routes/misc.cr
@@ -1,7 +1,9 @@
-class Invidious::Routes::Misc < Invidious::Routes::BaseRoute
- def home(env)
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Misc
+ def self.home(env)
preferences = env.get("preferences").as(Preferences)
- locale = LOCALES[preferences.locale]?
+ locale = preferences.locale
user = env.get? "user"
case preferences.default_home
@@ -17,7 +19,7 @@ class Invidious::Routes::Misc < Invidious::Routes::BaseRoute
end
when "Playlists"
if user
- env.redirect "/view_all_playlists"
+ env.redirect "/feed/playlists"
else
env.redirect "/feed/popular"
end
@@ -26,13 +28,28 @@ class Invidious::Routes::Misc < Invidious::Routes::BaseRoute
end
end
- def privacy(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.privacy(env)
+ locale = env.get("preferences").as(Preferences).locale
templated "privacy"
end
- def licenses(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.licenses(env)
+ locale = env.get("preferences").as(Preferences).locale
rendered "licenses"
end
+
+ def self.cross_instance_redirect(env)
+ referer = get_referer(env)
+
+ instance_list = Invidious::Jobs::InstanceListRefreshJob::INSTANCES["INSTANCES"]
+ if instance_list.empty?
+ instance_url = "redirect.invidious.io"
+ else
+ # Sample returns an array
+ # Instances are packaged as {region, domain} in the instance list
+ instance_url = instance_list.sample(1)[0][1]
+ end
+
+ env.redirect "https://#{instance_url}#{referer}"
+ end
end
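
cross_instance_redirect above picks a random public instance from the list kept warm by InstanceListRefreshJob, falling back to redirect.invidious.io when the list is empty. A self-contained illustration of that selection logic (the instance data here is made up; real entries are fetched from the public instance API):

    # Entries are assumed to be {region, domain} tuples, as the comment above notes.
    instances = [
      {"US", "invidious.example.com"},
      {"DE", "iv.example.org"},
    ] of Tuple(String, String)

    referer = "/watch?v=jNQXAC9IVRw"

    # `sample` picks one element; the route's `sample(1)[0]` is equivalent here.
    instance_url = instances.empty? ? "redirect.invidious.io" : instances.sample[1]

    puts "https://#{instance_url}#{referer}"
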
diff --git a/src/invidious/routes/notifications.cr b/src/invidious/routes/notifications.cr
new file mode 100644
index 00000000..8922b740
--- /dev/null
+++ b/src/invidious/routes/notifications.cr
@@ -0,0 +1,34 @@
+module Invidious::Routes::Notifications
+ # /modify_notifications
+ # will "ding" all subscriptions.
+ # /modify_notifications?receive_all_updates=false&receive_no_updates=false
+ # will "unding" all subscriptions.
+ def self.modify(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env, "/")
+
+ redirect = env.params.query["redirect"]?
+ redirect ||= "false"
+ redirect = redirect == "true"
+
+ if !user
+ if redirect
+ return env.redirect referer
+ else
+ return error_json(403, "No such user")
+ end
+ end
+
+ user = user.as(User)
+
+ if redirect
+ env.redirect referer
+ else
+ env.response.content_type = "application/json"
+ "{}"
+ end
+ end
+end
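
The redirect flag handling in `modify` is a pattern repeated across these routes: read an optional query parameter, default it, then compare against "true". A stripped-down, standalone version:

    require "http/params"

    # Anything other than an explicit "true" is treated as false,
    # matching the `redirect` handling in the route above.
    params = HTTP::Params.parse("receive_all_updates=false&redirect=true")

    redirect = params["redirect"]? || "false"
    redirect = redirect == "true"

    puts redirect # => true
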
diff --git a/src/invidious/routes/playlists.cr b/src/invidious/routes/playlists.cr
index 1f7fa27d..9c6843e9 100644
--- a/src/invidious/routes/playlists.cr
+++ b/src/invidious/routes/playlists.cr
@@ -1,31 +1,8 @@
-class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
- def index(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+{% skip_file if flag?(:api_only) %}
- user = env.get? "user"
- referer = get_referer(env)
-
- return env.redirect "/" if user.nil?
-
- user = user.as(User)
-
- items_created = PG_DB.query_all("SELECT * FROM playlists WHERE author = $1 AND id LIKE 'IV%' ORDER BY created", user.email, as: InvidiousPlaylist)
- items_created.map! do |item|
- item.author = ""
- item
- end
-
- items_saved = PG_DB.query_all("SELECT * FROM playlists WHERE author = $1 AND id NOT LIKE 'IV%' ORDER BY created", user.email, as: InvidiousPlaylist)
- items_saved.map! do |item|
- item.author = ""
- item
- end
-
- templated "view_all_playlists"
- end
-
- def new(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+module Invidious::Routes::Playlists
+ def self.new(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -35,13 +12,13 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
user = user.as(User)
sid = sid.as(String)
- csrf_token = generate_response(sid, {":create_playlist"}, HMAC_KEY, PG_DB)
+ csrf_token = generate_response(sid, {":create_playlist"}, HMAC_KEY)
templated "create_playlist"
end
- def create(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.create(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -54,7 +31,7 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
token = env.params.body["csrf_token"]?
begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
rescue ex
return error_template(400, ex)
end
@@ -69,17 +46,17 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
return error_template(400, "Invalid privacy setting.")
end
- if PG_DB.query_one("SELECT count(*) FROM playlists WHERE author = $1", user.email, as: Int64) >= 100
+ if Invidious::Database::Playlists.count_owned_by(user.email) >= 100
return error_template(400, "User cannot have more than 100 playlists.")
end
- playlist = create_playlist(PG_DB, title, privacy, user)
+ playlist = create_playlist(title, privacy, user)
env.redirect "/playlist?list=#{playlist.id}"
end
- def subscribe(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.subscribe(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
referer = get_referer(env)
@@ -89,14 +66,20 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
user = user.as(User)
playlist_id = env.params.query["list"]
- playlist = get_playlist(PG_DB, playlist_id, locale)
- subscribe_playlist(PG_DB, user, playlist)
+ begin
+ playlist = get_playlist(playlist_id)
+ rescue ex : NotFoundException
+ return error_template(404, ex)
+ rescue ex
+ return error_template(500, ex)
+ end
+ subscribe_playlist(user, playlist)
env.redirect "/playlist?list=#{playlist.id}"
end
- def delete_page(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.delete_page(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -108,18 +91,22 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
sid = sid.as(String)
plid = env.params.query["list"]?
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
+ if !plid || plid.empty?
+ return error_template(400, "A playlist ID is required")
+ end
+
+ playlist = Invidious::Database::Playlists.select(id: plid)
if !playlist || playlist.author != user.email
return env.redirect referer
end
- csrf_token = generate_response(sid, {":delete_playlist"}, HMAC_KEY, PG_DB)
+ csrf_token = generate_response(sid, {":delete_playlist"}, HMAC_KEY)
templated "delete_playlist"
end
- def delete(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.delete(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -135,24 +122,23 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
token = env.params.body["csrf_token"]?
begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
rescue ex
return error_template(400, ex)
end
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
+ playlist = Invidious::Database::Playlists.select(id: plid)
if !playlist || playlist.author != user.email
return env.redirect referer
end
- PG_DB.exec("DELETE FROM playlist_videos * WHERE plid = $1", plid)
- PG_DB.exec("DELETE FROM playlists * WHERE id = $1", plid)
+ Invidious::Database::Playlists.delete(plid)
- env.redirect "/view_all_playlists"
+ env.redirect "/feed/playlists"
end
- def edit(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.edit(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -171,28 +157,31 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
page = env.params.query["page"]?.try &.to_i?
page ||= 1
- begin
- playlist = PG_DB.query_one("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- if !playlist || playlist.author != user.email
- return env.redirect referer
- end
- rescue ex
+ playlist = Invidious::Database::Playlists.select(id: plid)
+ if !playlist || playlist.author != user.email
return env.redirect referer
end
begin
- videos = get_playlist_videos(PG_DB, playlist, offset: (page - 1) * 100, locale: locale)
+ items = get_playlist_videos(playlist, offset: (page - 1) * 100)
rescue ex
- videos = [] of PlaylistVideo
+ items = [] of PlaylistVideo
end
- csrf_token = generate_response(sid, {":edit_playlist"}, HMAC_KEY, PG_DB)
+ csrf_token = generate_response(sid, {":edit_playlist"}, HMAC_KEY)
+
+ # Pagination
+ page_nav_html = Frontend::Pagination.nav_numeric(locale,
+ base_url: "/playlist?list=#{playlist.id}",
+ current_page: page,
+ show_next: (items.size == 100)
+ )
templated "edit_playlist"
end
- def update(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.update(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -208,12 +197,12 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
token = env.params.body["csrf_token"]?
begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
rescue ex
return error_template(400, ex)
end
- playlist = PG_DB.query_one?("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
+ playlist = Invidious::Database::Playlists.select(id: plid)
if !playlist || playlist.author != user.email
return env.redirect referer
end
@@ -230,13 +219,16 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
updated = playlist.updated
end
- PG_DB.exec("UPDATE playlists SET title = $1, privacy = $2, description = $3, updated = $4 WHERE id = $5", title, privacy, description, updated, plid)
+ Invidious::Database::Playlists.update(plid, title, privacy, description, updated)
env.redirect "/playlist?list=#{plid}"
end
- def add_playlist_items_page(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.add_playlist_items_page(env)
+ prefs = env.get("preferences").as(Preferences)
+ locale = prefs.locale
+
+ region = env.params.query["region"]? || prefs.region
user = env.get? "user"
sid = env.get? "sid"
@@ -255,35 +247,32 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
page = env.params.query["page"]?.try &.to_i?
page ||= 1
- begin
- playlist = PG_DB.query_one("SELECT * FROM playlists WHERE id = $1", plid, as: InvidiousPlaylist)
- if !playlist || playlist.author != user.email
- return env.redirect referer
- end
- rescue ex
+ playlist = Invidious::Database::Playlists.select(id: plid)
+ if !playlist || playlist.author != user.email
return env.redirect referer
end
- query = env.params.query["q"]?
- if query
- begin
- search_query, count, items, operators = process_search_query(query, page, user, region: nil)
- videos = items.select { |item| item.is_a? SearchVideo }.map { |item| item.as(SearchVideo) }
- rescue ex
- videos = [] of SearchVideo
- count = 0
- end
- else
- videos = [] of SearchVideo
- count = 0
+ begin
+ query = Invidious::Search::Query.new(env.params.query, :playlist, region)
+ items = query.process.select(SearchVideo).map(&.as(SearchVideo))
+ rescue ex
+ items = [] of SearchVideo
end
+ # Pagination
+ query_encoded = URI.encode_www_form(query.try &.text || "", space_to_plus: true)
+ page_nav_html = Frontend::Pagination.nav_numeric(locale,
+ base_url: "/add_playlist_items?list=#{playlist.id}&q=#{query_encoded}",
+ current_page: page,
+ show_next: (items.size >= 20)
+ )
+
env.set "add_playlist_items", plid
templated "add_playlist_items"
end
- def playlist_ajax(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.playlist_ajax(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get? "user"
sid = env.get? "sid"
@@ -306,7 +295,7 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
token = env.params.body["csrf_token"]?
begin
- validate_request(token, sid, env.request, HMAC_KEY, PG_DB, locale)
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
rescue ex
if redirect
return error_template(400, ex)
@@ -334,8 +323,10 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
begin
playlist_id = env.params.query["playlist_id"]
- playlist = get_playlist(PG_DB, playlist_id, locale).as(InvidiousPlaylist)
+ playlist = get_playlist(playlist_id).as(InvidiousPlaylist)
raise "Invalid user" if playlist.author != user.email
+ rescue ex : NotFoundException
+ return error_json(404, ex)
rescue ex
if redirect
return error_template(400, ex)
@@ -344,28 +335,26 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
end
end
- if !user.password
- # TODO: Playlist stub, sync with YouTube for Google accounts
- # playlist_ajax(playlist_id, action, env.request.headers)
- end
email = user.email
case action
when "action_edit_playlist"
# TODO: Playlist stub
when "action_add_video"
- if playlist.index.size >= 500
+ if playlist.index.size >= CONFIG.playlist_length_limit
if redirect
- return error_template(400, "Playlist cannot have more than 500 videos")
+ return error_template(400, "Playlist cannot have more than #{CONFIG.playlist_length_limit} videos")
else
- return error_json(400, "Playlist cannot have more than 500 videos")
+ return error_json(400, "Playlist cannot have more than #{CONFIG.playlist_length_limit} videos")
end
end
video_id = env.params.query["video_id"]
begin
- video = get_video(video_id, PG_DB)
+ video = get_video(video_id)
+ rescue ex : NotFoundException
+ return error_json(404, ex)
rescue ex
if redirect
return error_template(500, ex)
@@ -386,15 +375,12 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
index: Random::Secure.rand(0_i64..Int64::MAX),
})
- video_array = playlist_video.to_a
- args = arg_array(video_array)
-
- PG_DB.exec("INSERT INTO playlist_videos VALUES (#{args})", args: video_array)
- PG_DB.exec("UPDATE playlists SET index = array_append(index, $1), video_count = cardinality(index) + 1, updated = $2 WHERE id = $3", playlist_video.index, Time.utc, playlist_id)
+ Invidious::Database::PlaylistVideos.insert(playlist_video)
+ Invidious::Database::Playlists.update_video_added(playlist_id, playlist_video.index)
when "action_remove_video"
index = env.params.query["set_video_id"]
- PG_DB.exec("DELETE FROM playlist_videos * WHERE index = $1", index)
- PG_DB.exec("UPDATE playlists SET index = array_remove(index, $1), video_count = cardinality(index) - 1, updated = $2 WHERE id = $3", index, Time.utc, playlist_id)
+ Invidious::Database::PlaylistVideos.delete(index)
+ Invidious::Database::Playlists.update_video_removed(playlist_id, index)
when "action_move_video_before"
# TODO: Playlist stub
else
@@ -409,8 +395,8 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
end
end
- def show(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.show(env)
+ locale = env.get("preferences").as(Preferences).locale
user = env.get?("user").try &.as(User)
referer = get_referer(env)
@@ -428,13 +414,20 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
end
begin
- playlist = get_playlist(PG_DB, plid, locale)
+ playlist = get_playlist(plid)
+ rescue ex : NotFoundException
+ return error_template(404, ex)
rescue ex
return error_template(500, ex)
end
- page_count = (playlist.video_count / 100).to_i
- page_count += 1 if (playlist.video_count % 100) > 0
+ if playlist.is_a? InvidiousPlaylist
+ page_count = (playlist.video_count / 100).to_i
+ page_count += 1 if (playlist.video_count % 100) > 0
+ else
+ page_count = (playlist.video_count / 200).to_i
+ page_count += 1 if (playlist.video_count % 200) > 0
+ end
if page > page_count
return env.redirect "/playlist?list=#{plid}&page=#{page_count}"
@@ -445,7 +438,11 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
end
begin
- videos = get_playlist_videos(PG_DB, playlist, offset: (page - 1) * 100, locale: locale)
+ if playlist.is_a? InvidiousPlaylist
+ items = get_playlist_videos(playlist, offset: (page - 1) * 100)
+ else
+ items = get_playlist_videos(playlist, offset: (page - 1) * 200)
+ end
rescue ex
return error_template(500, "Error encountered while retrieving playlist videos.<br>#{ex.message}")
end
@@ -454,11 +451,18 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
env.set "remove_playlist_items", plid
end
+ # Pagination
+ page_nav_html = Frontend::Pagination.nav_numeric(locale,
+ base_url: "/playlist?list=#{playlist.id}",
+ current_page: page,
+ show_next: (page_count != 1 && page < page_count)
+ )
+
templated "playlist"
end
- def mix(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.mix(env)
+ locale = env.get("preferences").as(Preferences).locale
rdid = env.params.query["list"]?
if !rdid
@@ -476,4 +480,15 @@ class Invidious::Routes::Playlists < Invidious::Routes::BaseRoute
templated "mix"
end
+
+ # Undocumented, creates anonymous playlist with specified 'video_ids', max 50 videos
+ def self.watch_videos(env)
+ response = YT_POOL.client &.get(env.request.resource)
+ if url = response.headers["Location"]?
+ url = URI.parse(url).request_target
+ return env.redirect url
+ end
+
+ env.response.status_code = response.status_code
+ end
end
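
The `show` handler above pages Invidious playlists 100 videos at a time and remote YouTube playlists 200 at a time. The ceiling division it performs by hand can be checked in isolation:

    # Stand-alone copy of the page-count arithmetic from `show` above.
    def page_count(video_count : Int32, per_page : Int32) : Int32
      pages = (video_count / per_page).to_i
      pages += 1 if (video_count % per_page) > 0
      pages
    end

    puts page_count(250, 100) # => 3 pages for an Invidious playlist
    puts page_count(400, 200) # => 2 pages for a YouTube playlist
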
diff --git a/src/invidious/routes/preferences.cr b/src/invidious/routes/preferences.cr
index 4901d22b..39ca77c0 100644
--- a/src/invidious/routes/preferences.cr
+++ b/src/invidious/routes/preferences.cr
@@ -1,16 +1,18 @@
-class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
- def show(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::PreferencesRoute
+ def self.show(env)
+ locale = env.get("preferences").as(Preferences).locale
referer = get_referer(env)
preferences = env.get("preferences").as(Preferences)
- templated "preferences"
+ templated "user/preferences"
end
- def update(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.update(env)
+ locale = env.get("preferences").as(Preferences).locale
referer = get_referer(env)
video_loop = env.params.body["video_loop"]?.try &.as(String)
@@ -25,6 +27,10 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
annotations_subscribed ||= "off"
annotations_subscribed = annotations_subscribed == "on"
+ preload = env.params.body["preload"]?.try &.as(String)
+ preload ||= "off"
+ preload = preload == "on"
+
autoplay = env.params.body["autoplay"]?.try &.as(String)
autoplay ||= "off"
autoplay = autoplay == "on"
@@ -45,6 +51,10 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
local ||= "off"
local = local == "on"
+ watch_history = env.params.body["watch_history"]?.try &.as(String)
+ watch_history ||= "off"
+ watch_history = watch_history == "on"
+
speed = env.params.body["speed"]?.try &.as(String).to_f32?
speed ||= CONFIG.default_user_preferences.speed
@@ -60,6 +70,22 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
volume = env.params.body["volume"]?.try &.as(String).to_i?
volume ||= CONFIG.default_user_preferences.volume
+ extend_desc = env.params.body["extend_desc"]?.try &.as(String)
+ extend_desc ||= "off"
+ extend_desc = extend_desc == "on"
+
+ vr_mode = env.params.body["vr_mode"]?.try &.as(String)
+ vr_mode ||= "off"
+ vr_mode = vr_mode == "on"
+
+ save_player_pos = env.params.body["save_player_pos"]?.try &.as(String)
+ save_player_pos ||= "off"
+ save_player_pos = save_player_pos == "on"
+
+ show_nick = env.params.body["show_nick"]?.try &.as(String)
+ show_nick ||= "off"
+ show_nick = show_nick == "on"
+
comments = [] of String
2.times do |i|
comments << (env.params.body["comments[#{i}]"]?.try &.as(String) || CONFIG.default_user_preferences.comments[i])
@@ -84,6 +110,12 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
end
end
+ automatic_instance_redirect = env.params.body["automatic_instance_redirect"]?.try &.as(String)
+ automatic_instance_redirect ||= "off"
+ automatic_instance_redirect = automatic_instance_redirect == "on"
+
+ region = env.params.body["region"]?.try &.as(String)
+
locale = env.params.body["locale"]?.try &.as(String)
locale ||= CONFIG.default_user_preferences.locale
@@ -112,39 +144,48 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
notifications_only ||= "off"
notifications_only = notifications_only == "on"
- # Convert to JSON and back again to take advantage of converters used for compatability
+ # Convert to JSON and back again to take advantage of converters used for compatibility
preferences = Preferences.from_json({
- annotations: annotations,
- annotations_subscribed: annotations_subscribed,
- autoplay: autoplay,
- captions: captions,
- comments: comments,
- continue: continue,
- continue_autoplay: continue_autoplay,
- dark_mode: dark_mode,
- latest_only: latest_only,
- listen: listen,
- local: local,
- locale: locale,
- max_results: max_results,
- notifications_only: notifications_only,
- player_style: player_style,
- quality: quality,
- quality_dash: quality_dash,
- default_home: default_home,
- feed_menu: feed_menu,
- related_videos: related_videos,
- sort: sort,
- speed: speed,
- thin_mode: thin_mode,
- unseen_only: unseen_only,
- video_loop: video_loop,
- volume: volume,
- }.to_json).to_json
+ annotations: annotations,
+ annotations_subscribed: annotations_subscribed,
+ preload: preload,
+ autoplay: autoplay,
+ captions: captions,
+ comments: comments,
+ continue: continue,
+ continue_autoplay: continue_autoplay,
+ dark_mode: dark_mode,
+ latest_only: latest_only,
+ listen: listen,
+ local: local,
+ watch_history: watch_history,
+ locale: locale,
+ max_results: max_results,
+ notifications_only: notifications_only,
+ player_style: player_style,
+ quality: quality,
+ quality_dash: quality_dash,
+ default_home: default_home,
+ feed_menu: feed_menu,
+ automatic_instance_redirect: automatic_instance_redirect,
+ region: region,
+ related_videos: related_videos,
+ sort: sort,
+ speed: speed,
+ thin_mode: thin_mode,
+ unseen_only: unseen_only,
+ video_loop: video_loop,
+ volume: volume,
+ extend_desc: extend_desc,
+ vr_mode: vr_mode,
+ show_nick: show_nick,
+ save_player_pos: save_player_pos,
+ }.to_json)
if user = env.get? "user"
user = user.as(User)
- PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences, user.email)
+ user.preferences = preferences
+ Invidious::Database::Users.update_preferences(user)
if CONFIG.admins.includes? user.email
CONFIG.default_user_preferences.default_home = env.params.body["admin_default_home"]?.try &.as(String) || CONFIG.default_user_preferences.default_home
@@ -178,29 +219,19 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
statistics_enabled ||= "off"
CONFIG.statistics_enabled = statistics_enabled == "on"
+ CONFIG.modified_source_code_url = env.params.body["modified_source_code_url"]?.presence
+
File.write("config/config.yml", CONFIG.to_yaml)
end
else
- if Kemal.config.ssl || CONFIG.https_only
- secure = true
- else
- secure = false
- end
-
- if CONFIG.domain
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{CONFIG.domain}", value: preferences, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- else
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- end
+ env.response.cookies["PREFS"] = Invidious::User::Cookies.prefs(CONFIG.domain, preferences)
end
env.redirect referer
end
- def toggle_theme(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.toggle_theme(env)
+ locale = env.get("preferences").as(Preferences).locale
referer = get_referer(env, unroll: false)
redirect = env.params.query["redirect"]?
@@ -209,18 +240,15 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
if user = env.get? "user"
user = user.as(User)
- preferences = user.preferences
- case preferences.dark_mode
+ case user.preferences.dark_mode
when "dark"
- preferences.dark_mode = "light"
+ user.preferences.dark_mode = "light"
else
- preferences.dark_mode = "dark"
+ user.preferences.dark_mode = "dark"
end
- preferences = preferences.to_json
-
- PG_DB.exec("UPDATE users SET preferences = $1 WHERE email = $2", preferences, user.email)
+ Invidious::Database::Users.update_preferences(user)
else
preferences = env.get("preferences").as(Preferences)
@@ -231,21 +259,7 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
preferences.dark_mode = "dark"
end
- preferences = preferences.to_json
-
- if Kemal.config.ssl || CONFIG.https_only
- secure = true
- else
- secure = false
- end
-
- if CONFIG.domain
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{CONFIG.domain}", value: preferences, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- else
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.utc + 2.years,
- secure: secure, http_only: true)
- end
+ env.response.cookies["PREFS"] = Invidious::User::Cookies.prefs(CONFIG.domain, preferences)
end
if redirect
@@ -255,4 +269,87 @@ class Invidious::Routes::PreferencesRoute < Invidious::Routes::BaseRoute
"{}"
end
end
+
+ def self.data_control(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+
+ templated "user/data_control"
+ end
+
+ def self.update_data_control(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ referer = get_referer(env)
+
+ if user
+ user = user.as(User)
+
+ # TODO: Find a way to prevent browser timeout
+
+ HTTP::FormData.parse(env.request) do |part|
+ body = part.body.gets_to_end
+ type = part.headers["Content-Type"]
+
+ next if body.empty?
+
+ # TODO: Unify into single import based on content-type
+ case part.name
+ when "import_invidious"
+ Invidious::User::Import.from_invidious(user, body)
+ when "import_youtube"
+ filename = part.filename || ""
+ success = Invidious::User::Import.from_youtube(user, body, filename, type)
+
+ if !success
+ haltf(env, status_code: 415,
+ response: error_template(415, "Invalid subscription file uploaded")
+ )
+ end
+ when "import_youtube_pl"
+ filename = part.filename || ""
+ success = Invidious::User::Import.from_youtube_pl(user, body, filename, type)
+
+ if !success
+ haltf(env, status_code: 415,
+ response: error_template(415, "Invalid playlist file uploaded")
+ )
+ end
+ when "import_youtube_wh"
+ filename = part.filename || ""
+ success = Invidious::User::Import.from_youtube_wh(user, body, filename, type)
+
+ if !success
+ haltf(env, status_code: 415,
+ response: error_template(415, "Invalid watch history file uploaded")
+ )
+ end
+ when "import_freetube"
+ Invidious::User::Import.from_freetube(user, body)
+ when "import_newpipe_subscriptions"
+ Invidious::User::Import.from_newpipe_subs(user, body)
+ when "import_newpipe"
+ success = Invidious::User::Import.from_newpipe(user, body)
+
+ if !success
+ haltf(env, status_code: 415,
+ response: error_template(415, "Uploaded file is too large")
+ )
+ end
+ else nil # Ignore
+ end
+ end
+ end
+
+ env.redirect referer
+ end
end
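
Most of the new preference fields above follow the same three lines: read the checkbox value, default to "off", convert to a boolean. Factored out, the pattern is:

    require "http/params"

    # HTML checkboxes are only submitted when ticked, so a missing key means "off".
    def checkbox_enabled?(body : HTTP::Params, name : String) : Bool
      (body[name]? || "off") == "on"
    end

    body = HTTP::Params.parse("dark_mode=on&video_loop=off")
    puts checkbox_enabled?(body, "dark_mode") # => true
    puts checkbox_enabled?(body, "preload")   # => false
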
diff --git a/src/invidious/routes/search.cr b/src/invidious/routes/search.cr
index a993a17a..44970922 100644
--- a/src/invidious/routes/search.cr
+++ b/src/invidious/routes/search.cr
@@ -1,6 +1,8 @@
-class Invidious::Routes::Search < Invidious::Routes::BaseRoute
- def opensearch(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Search
+ def self.opensearch(env)
+ locale = env.get("preferences").as(Preferences).locale
env.response.content_type = "application/opensearchdescription+xml"
XML.build(indent: " ", encoding: "UTF-8") do |xml|
@@ -15,51 +17,103 @@ class Invidious::Routes::Search < Invidious::Routes::BaseRoute
end
end
- def results(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+ def self.results(env)
+ locale = env.get("preferences").as(Preferences).locale
query = env.params.query["search_query"]?
query ||= env.params.query["q"]?
- query ||= ""
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
+ page = env.params.query["page"]?
- if query
- env.redirect "/search?q=#{URI.encode_www_form(query)}&page=#{page}"
+ if query && !query.empty?
+ if page && !page.empty?
+ env.redirect "/search?q=" + URI.encode_www_form(query) + "&page=" + page
+ else
+ env.redirect "/search?q=" + URI.encode_www_form(query)
+ end
else
- env.redirect "/"
+ env.redirect "/search"
end
end
- def search(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- region = env.params.query["region"]?
+ def self.search(env)
+ prefs = env.get("preferences").as(Preferences)
+ locale = prefs.locale
- query = env.params.query["search_query"]?
- query ||= env.params.query["q"]?
- query ||= ""
+ region = env.params.query["region"]? || prefs.region
+
+ query = Invidious::Search::Query.new(env.params.query, :regular, region)
+
+ if query.empty?
+ # Display the full page search box implemented in #1977
+ env.set "search", ""
+ templated "search_homepage", navbar_search: false
+ else
+ user = env.get? "user"
+
+ # A URL was copy/pasted into the search box.
+ # Redirect the user to the appropriate page.
+ if query.url?
+ return env.redirect UrlSanitizer.process(query.text).to_s
+ end
+
+ begin
+ items = query.process
+ rescue ex : ChannelSearchException
+ return error_template(404, "Unable to find channel with id of '#{HTML.escape(ex.channel)}'. Are you sure that's an actual channel id? It should look like 'UC4QobU6STFB0P71PMvOGN5A'.")
+ rescue ex
+ return error_template(500, ex)
+ end
- return env.redirect "/" if query.empty?
+ redirect_url = Invidious::Frontend::Misc.redirect_url(env)
- page = env.params.query["page"]?.try &.to_i?
- page ||= 1
+ # Pagination
+ page_nav_html = Frontend::Pagination.nav_numeric(locale,
+ base_url: "/search?#{query.to_http_params}",
+ current_page: query.page,
+ show_next: (items.size >= 20)
+ )
- user = env.get? "user"
+ if query.type == Invidious::Search::Query::Type::Channel
+ env.set "search", "channel:#{query.channel} #{query.text}"
+ else
+ env.set "search", query.text
+ end
+
+ templated "search"
+ end
+ end
+
+ def self.hashtag(env : HTTP::Server::Context)
+ locale = env.get("preferences").as(Preferences).locale
+
+ hashtag = env.params.url["hashtag"]?
+ if hashtag.nil? || hashtag.empty?
+ return error_template(400, "Invalid request")
+ end
+
+ page = env.params.query["page"]?
+ if page.nil?
+ page = 1
+ else
+ page = Math.max(1, page.to_i)
+ env.params.query.delete_all("page")
+ end
begin
- search_query, count, videos, operators = process_search_query(query, page, user, region: nil)
+ items = Invidious::Hashtag.fetch(hashtag, page)
rescue ex
return error_template(500, ex)
end
- operator_hash = {} of String => String
- operators.each do |operator|
- key, value = operator.downcase.split(":")
- operator_hash[key] = value
- end
+ # Pagination
+ hashtag_encoded = URI.encode_www_form(hashtag, space_to_plus: false)
+ page_nav_html = Frontend::Pagination.nav_numeric(locale,
+ base_url: "/hashtag/#{hashtag_encoded}",
+ current_page: page,
+ show_next: (items.size >= 60)
+ )
- env.set "search", query
- templated "search"
+ templated "hashtag"
end
end
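
`search` above short-circuits when a full URL is pasted into the search box and hands it to UrlSanitizer. A rough, standard-library-only approximation of that detection (the real check is stricter about what it accepts):

    require "uri"

    # Assumption: "looks like a URL" means an absolute http(s) URI with a host.
    def pasted_url?(text : String) : Bool
      uri = URI.parse(text)
      !uri.host.nil? && {"http", "https"}.includes?(uri.scheme)
    rescue URI::Error
      false
    end

    puts pasted_url?("https://youtube.com/watch?v=jNQXAC9IVRw") # => true
    puts pasted_url?("crystal lang tutorial")                   # => false
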
diff --git a/src/invidious/routes/subscriptions.cr b/src/invidious/routes/subscriptions.cr
new file mode 100644
index 00000000..7f9ec592
--- /dev/null
+++ b/src/invidious/routes/subscriptions.cr
@@ -0,0 +1,130 @@
+module Invidious::Routes::Subscriptions
+ def self.toggle_subscription(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env, "/")
+
+ redirect = env.params.query["redirect"]?
+ redirect ||= "true"
+ redirect = redirect == "true"
+
+ if !user
+ if redirect
+ return env.redirect referer
+ else
+ return error_json(403, "No such user")
+ end
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ if redirect
+ return error_template(400, ex)
+ else
+ return error_json(400, ex)
+ end
+ end
+
+ if env.params.query["action_create_subscription_to_channel"]?.try &.to_i?.try &.== 1
+ action = "action_create_subscription_to_channel"
+ elsif env.params.query["action_remove_subscriptions"]?.try &.to_i?.try &.== 1
+ action = "action_remove_subscriptions"
+ else
+ return env.redirect referer
+ end
+
+ channel_id = env.params.query["c"]?
+ channel_id ||= ""
+
+ case action
+ when "action_create_subscription_to_channel"
+ if !user.subscriptions.includes? channel_id
+ get_channel(channel_id)
+ Invidious::Database::Users.subscribe_channel(user, channel_id)
+ end
+ when "action_remove_subscriptions"
+ Invidious::Database::Users.unsubscribe_channel(user, channel_id)
+ else
+ return error_json(400, "Unsupported action #{action}")
+ end
+
+ if redirect
+ env.redirect referer
+ else
+ env.response.content_type = "application/json"
+ "{}"
+ end
+ end
+
+ def self.subscription_manager(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env)
+
+ if !user
+ return env.redirect referer
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+
+ action_takeout = env.params.query["action_takeout"]?.try &.to_i?
+ action_takeout ||= 0
+ action_takeout = action_takeout == 1
+
+ format = env.params.query["format"]?
+ format ||= "rss"
+
+ subscriptions = Invidious::Database::Channels.select(user.subscriptions)
+ subscriptions.sort_by!(&.author.downcase)
+
+ if action_takeout
+ if format == "json"
+ env.response.content_type = "application/json"
+ env.response.headers["content-disposition"] = "attachment"
+
+ return Invidious::User::Export.to_invidious(user)
+ else
+ env.response.content_type = "application/xml"
+ env.response.headers["content-disposition"] = "attachment"
+ export = XML.build do |xml|
+ xml.element("opml", version: "1.1") do
+ xml.element("body") do
+ if format == "newpipe"
+ title = "YouTube Subscriptions"
+ else
+ title = "Invidious Subscriptions"
+ end
+
+ xml.element("outline", text: title, title: title) do
+ subscriptions.each do |channel|
+ if format == "newpipe"
+ xml_url = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel.id}"
+ else
+ xml_url = "#{HOST_URL}/feed/channel/#{channel.id}"
+ end
+
+ xml.element("outline", text: channel.author, title: channel.author,
+ "type": "rss", xmlUrl: xml_url)
+ end
+ end
+ end
+ end
+ end
+
+ return export.gsub(%(<?xml version="1.0"?>\n), "")
+ end
+ end
+
+ templated "user/subscription_manager"
+ end
+end
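
The OPML takeout in `subscription_manager` is built with the standard XML builder. A condensed version with a hard-coded channel list (the real route pulls subscriptions from the database and also emits the NewPipe variant):

    require "xml"

    channels = [{"UC4QobU6STFB0P71PMvOGN5A", "jawed"}]

    opml = XML.build do |xml|
      xml.element("opml", version: "1.1") do
        xml.element("body") do
          xml.element("outline", text: "Invidious Subscriptions", title: "Invidious Subscriptions") do
            channels.each do |(id, author)|
              xml.element("outline", text: author, title: author,
                "type": "rss", xmlUrl: "https://www.youtube.com/feeds/videos.xml?channel_id=#{id}")
            end
          end
        end
      end
    end

    # The route strips the XML declaration before returning the document.
    puts opml
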
diff --git a/src/invidious/routes/video_playback.cr b/src/invidious/routes/video_playback.cr
new file mode 100644
index 00000000..26852d06
--- /dev/null
+++ b/src/invidious/routes/video_playback.cr
@@ -0,0 +1,303 @@
+module Invidious::Routes::VideoPlayback
+ # /videoplayback
+ def self.get_video_playback(env)
+ locale = env.get("preferences").as(Preferences).locale
+ query_params = env.params.query
+
+ fvip = query_params["fvip"]? || "3"
+ mns = query_params["mn"]?.try &.split(",")
+ mns ||= [] of String
+
+ if query_params["region"]?
+ region = query_params["region"]
+ query_params.delete("region")
+ end
+
+ if query_params["host"]? && !query_params["host"].empty?
+ host = query_params["host"]
+ query_params.delete("host")
+ else
+ host = "r#{fvip}---#{mns.pop}.googlevideo.com"
+ end
+
+ # Sanity check, to avoid being used as an open proxy
+ if !host.matches?(/[\w-]+.googlevideo.com/)
+ return error_template(400, "Invalid \"host\" parameter.")
+ end
+
+ host = "https://#{host}"
+ url = "/videoplayback?#{query_params}"
+
+ headers = HTTP::Headers.new
+ REQUEST_HEADERS_WHITELIST.each do |header|
+ if env.request.headers[header]?
+ headers[header] = env.request.headers[header]
+ end
+ end
+
+ # See: https://github.com/iv-org/invidious/issues/3302
+ range_header = env.request.headers["Range"]?
+ if range_header.nil?
+ range_for_head = query_params["range"]? || "0-640"
+ headers["Range"] = "bytes=#{range_for_head}"
+ end
+
+ client = make_client(URI.parse(host), region, force_resolve: true)
+ response = HTTP::Client::Response.new(500)
+ error = ""
+ 5.times do
+ begin
+ response = client.head(url, headers)
+
+ if response.headers["Location"]?
+ location = URI.parse(response.headers["Location"])
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ new_host = "#{location.scheme}://#{location.host}"
+ if new_host != host
+ host = new_host
+ client.close
+ client = make_client(URI.parse(new_host), region, force_resolve: true)
+ end
+
+ url = "#{location.request_target}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
+ else
+ break
+ end
+ rescue Socket::Addrinfo::Error
+ if !mns.empty?
+ mn = mns.pop
+ end
+ fvip = "3"
+
+ host = "https://r#{fvip}---#{mn}.googlevideo.com"
+ client = make_client(URI.parse(host), region, force_resolve: true)
+ rescue ex
+ error = ex.message
+ end
+ end
+
+ # Remove the Range header added previously.
+ headers.delete("Range") if range_header.nil?
+
+ playback_statistics = get_playback_statistic()
+ playback_statistics["totalRequests"] += 1
+
+ if response.status_code >= 400
+ env.response.content_type = "text/plain"
+ haltf env, response.status_code
+ else
+ playback_statistics["successfulRequests"] += 1
+ end
+
+ if url.includes? "&file=seg.ts"
+ if CONFIG.disabled?("livestreams")
+ return error_template(403, "Administrator has disabled this endpoint.")
+ end
+
+ begin
+ client.get(url, headers) do |resp|
+ resp.headers.each do |key, value|
+ if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
+ env.response.headers[key] = value
+ end
+ end
+
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if location = resp.headers["Location"]?
+ url = Invidious::HttpServer::Utils.proxy_video_url(location, region: region)
+ return env.redirect url
+ end
+
+ IO.copy(resp.body_io, env.response)
+ end
+ rescue ex
+ end
+ else
+ if query_params["title"]? && CONFIG.disabled?("downloads") ||
+ CONFIG.disabled?("dash")
+ return error_template(403, "Administrator has disabled this endpoint.")
+ end
+
+ content_length = nil
+ first_chunk = true
+ range_start, range_end = parse_range(env.request.headers["Range"]?)
+ chunk_start = range_start
+ chunk_end = range_end
+
+ if !chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE
+ chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
+ end
+
+ # TODO: Record bytes written so we can restart after a chunk fails
+ loop do
+ if !range_end && content_length
+ range_end = content_length
+ end
+
+ if range_end && chunk_start > range_end
+ break
+ end
+
+ if range_end && chunk_end > range_end
+ chunk_end = range_end
+ end
+
+ headers["Range"] = "bytes=#{chunk_start}-#{chunk_end}"
+
+ begin
+ client.get(url, headers) do |resp|
+ if first_chunk
+ if !env.request.headers["Range"]? && resp.status_code == 206
+ env.response.status_code = 200
+ else
+ env.response.status_code = resp.status_code
+ end
+
+ resp.headers.each do |key, value|
+ if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase) && key.downcase != "content-range"
+ env.response.headers[key] = value
+ end
+ end
+
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+
+ if location = resp.headers["Location"]?
+ location = URI.parse(location)
+ location = "#{location.request_target}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
+
+ env.redirect location
+ break
+ end
+
+ if title = query_params["title"]?
+ # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
+ filename = URI.encode_www_form(title, space_to_plus: false)
+ header = "attachment; filename=\"#{filename}\"; filename*=UTF-8''#{filename}"
+ env.response.headers["Content-Disposition"] = header
+ end
+
+ if !resp.headers.includes_word?("Transfer-Encoding", "chunked")
+ content_length = resp.headers["Content-Range"].split("/")[-1].to_i64
+ if env.request.headers["Range"]?
+ env.response.headers["Content-Range"] = "bytes #{range_start}-#{range_end || (content_length - 1)}/#{content_length}"
+ env.response.content_length = ((range_end.try &.+ 1) || content_length) - range_start
+ else
+ env.response.content_length = content_length
+ end
+ end
+ end
+
+ proxy_file(resp, env)
+ end
+ rescue ex
+ if ex.message != "Error reading socket: Connection reset by peer"
+ break
+ else
+ client.close
+ client = make_client(URI.parse(host), region, force_resolve: true)
+ end
+ end
+
+ chunk_start = chunk_end + 1
+ chunk_end += HTTP_CHUNK_SIZE
+ first_chunk = false
+ end
+ end
+ client.close
+ end
+
+ # /videoplayback/*
+ def self.get_video_playback_greedy(env)
+ path = env.request.path
+
+ path = path.lchop("/videoplayback/")
+ path = path.rchop("/")
+
+ path = path.gsub(/mime\/\w+\/\w+/) do |mimetype|
+ mimetype = mimetype.split("/")
+ mimetype[0] + "/" + mimetype[1] + "%2F" + mimetype[2]
+ end
+
+ path = path.split("/")
+
+ raw_params = {} of String => Array(String)
+ path.each_slice(2) do |pair|
+ key, value = pair
+ value = URI.decode_www_form(value)
+
+ if raw_params[key]?
+ raw_params[key] << value
+ else
+ raw_params[key] = [value]
+ end
+ end
+
+ query_params = HTTP::Params.new(raw_params)
+
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+ return env.redirect "/videoplayback?#{query_params}"
+ end
+
+ # /videoplayback && /videoplayback/*
+ def self.options_video_playback(env)
+ env.response.headers.delete("Content-Type")
+ env.response.headers["Access-Control-Allow-Origin"] = "*"
+ env.response.headers["Access-Control-Allow-Methods"] = "GET, OPTIONS"
+ env.response.headers["Access-Control-Allow-Headers"] = "Content-Type, Range"
+ end
+
+ # /latest_version
+ #
+ # YouTube /videoplayback links expire after 6 hours,
+ # so we have a mechanism here to redirect to the latest version
+ def self.latest_version(env)
+ id = env.params.query["id"]?
+ itag = env.params.query["itag"]?.try &.to_i?
+
+ # Sanity checks
+ if id.nil? || id.size != 11 || !id.matches?(/^[\w-]+$/)
+ return error_template(400, "Invalid video ID")
+ end
+
+ if !itag.nil? && (itag <= 0 || itag >= 1000)
+ return error_template(400, "Invalid itag")
+ end
+
+ region = env.params.query["region"]?
+ local = (env.params.query["local"]? == "true")
+
+ title = env.params.query["title"]?
+
+ if title && CONFIG.disabled?("downloads")
+ return error_template(403, "Administrator has disabled this endpoint.")
+ end
+
+ begin
+ video = get_video(id, region: region)
+ rescue ex : NotFoundException
+ return error_template(404, ex)
+ rescue ex
+ return error_template(500, ex)
+ end
+
+ if itag.nil?
+ fmt = video.fmt_stream[-1]?
+ else
+ fmt = video.fmt_stream.find(nil) { |f| f["itag"].as_i == itag } || video.adaptive_fmts.find(nil) { |f| f["itag"].as_i == itag }
+ end
+ url = fmt.try &.["url"]?.try &.as_s
+
+ if !url
+ haltf env, status_code: 404
+ end
+
+ if local
+ url = URI.parse(url).request_target.not_nil!
+ url += "&title=#{URI.encode_www_form(title, space_to_plus: false)}" if title
+ end
+
+ return env.redirect url
+ end
+end
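
The download path in `get_video_playback` fetches the file in fixed-size Range windows rather than one request (see the loop above). The windowing arithmetic, isolated and assuming a 10 MiB value for HTTP_CHUNK_SIZE (the constant is defined elsewhere in the codebase):

    # Assumed chunk size; the real HTTP_CHUNK_SIZE may differ.
    CHUNK = 10 * 1024 * 1024

    def chunks(range_start : Int64, range_end : Int64) : Array({Int64, Int64})
      result = [] of {Int64, Int64}
      chunk_start = range_start
      while chunk_start <= range_end
        chunk_end = Math.min(chunk_start + CHUNK - 1, range_end)
        result << {chunk_start, chunk_end}
        chunk_start = chunk_end + 1
      end
      result
    end

    # A 25 MiB file is fetched as three Range requests:
    pp chunks(0_i64, 25_i64 * 1024 * 1024 - 1)
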
diff --git a/src/invidious/routes/watch.cr b/src/invidious/routes/watch.cr
index d0338882..aabe8dfc 100644
--- a/src/invidious/routes/watch.cr
+++ b/src/invidious/routes/watch.cr
@@ -1,6 +1,8 @@
-class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
- def handle(env)
- locale = LOCALES[env.get("preferences").as(Preferences).locale]?
+{% skip_file if flag?(:api_only) %}
+
+module Invidious::Routes::Watch
+ def self.handle(env)
+ locale = env.get("preferences").as(Preferences).locale
region = env.params.query["region"]?
if env.params.query.to_s.includes?("%20") || env.params.query.to_s.includes?("+")
@@ -28,16 +30,8 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
return env.redirect "/"
end
- embed_link = "/embed/#{id}"
- if env.params.query.size > 1
- embed_params = HTTP::Params.parse(env.params.query.to_s)
- embed_params.delete_all("v")
- embed_link += "?"
- embed_link += embed_params.to_s
- end
-
plid = env.params.query["list"]?.try &.gsub(/[^a-zA-Z0-9_-]/, "")
- continuation = process_continuation(PG_DB, env.params.query, plid, id)
+ continuation = process_continuation(env.params.query, plid, id)
nojs = env.params.query["nojs"]?
@@ -58,9 +52,10 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
env.params.query.delete_all("listen")
begin
- video = get_video(id, PG_DB, region: params.region)
- rescue ex : VideoRedirect
- return env.redirect env.request.resource.gsub(id, ex.video_id)
+ video = get_video(id, region: params.region)
+ rescue ex : NotFoundException
+ LOGGER.error("get_video not found: #{id} : #{ex.message}")
+ return error_template(404, ex)
rescue ex
LOGGER.error("get_video: #{id} : #{ex.message}")
return error_template(500, ex)
@@ -73,12 +68,12 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
end
env.params.query.delete_all("iv_load_policy")
- if watched && !watched.includes? id
- PG_DB.exec("UPDATE users SET watched = array_append(watched, $1) WHERE email = $2", id, user.as(User).email)
+ if watched && preferences.watch_history
+ Invidious::Database::Users.mark_watched(user.as(User), id)
end
- if notifications && notifications.includes? id
- PG_DB.exec("UPDATE users SET notifications = array_remove(notifications, $1) WHERE email = $2", id, user.as(User).email)
+ if CONFIG.enable_user_notifications && notifications && notifications.includes? id
+ Invidious::Database::Users.remove_notification(user.as(User), id)
env.get("user").as(User).notifications.delete(id)
notifications.delete(id)
end
@@ -92,31 +87,31 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
if source == "youtube"
begin
- comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(Comments.fetch_youtube(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
rescue ex
if preferences.comments[1] == "reddit"
- comments, reddit_thread = fetch_reddit_comments(id)
- comment_html = template_reddit_comments(comments, locale)
+ comments, reddit_thread = Comments.fetch_reddit(id)
+ comment_html = Frontend::Comments.template_reddit(comments, locale)
- comment_html = fill_links(comment_html, "https", "www.reddit.com")
- comment_html = replace_links(comment_html)
+ comment_html = Comments.fill_links(comment_html, "https", "www.reddit.com")
+ comment_html = Comments.replace_links(comment_html)
end
end
elsif source == "reddit"
begin
- comments, reddit_thread = fetch_reddit_comments(id)
- comment_html = template_reddit_comments(comments, locale)
+ comments, reddit_thread = Comments.fetch_reddit(id)
+ comment_html = Frontend::Comments.template_reddit(comments, locale)
- comment_html = fill_links(comment_html, "https", "www.reddit.com")
- comment_html = replace_links(comment_html)
+ comment_html = Comments.fill_links(comment_html, "https", "www.reddit.com")
+ comment_html = Comments.replace_links(comment_html)
rescue ex
if preferences.comments[1] == "youtube"
- comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(Comments.fetch_youtube(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
end
end
else
- comment_html = JSON.parse(fetch_youtube_comments(id, PG_DB, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(Comments.fetch_youtube(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
comment_html ||= ""
@@ -150,12 +145,12 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
captions = video.captions
preferred_captions = captions.select { |caption|
- params.preferred_captions.includes?(caption.name.simpleText) ||
- params.preferred_captions.includes?(caption.languageCode.split("-")[0])
+ params.preferred_captions.includes?(caption.name) ||
+ params.preferred_captions.includes?(caption.language_code.split("-")[0])
}
preferred_captions.sort_by! { |caption|
- (params.preferred_captions.index(caption.name.simpleText) ||
- params.preferred_captions.index(caption.languageCode.split("-")[0])).not_nil!
+ (params.preferred_captions.index(caption.name) ||
+ params.preferred_captions.index(caption.language_code.split("-")[0])).not_nil!
}
captions = captions - preferred_captions
@@ -167,9 +162,11 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
if params.listen
url = audio_streams[0]["url"].as_s
- audio_streams.each do |fmt|
- if fmt["bitrate"].as_i == params.quality.rchop("k").to_i
- url = fmt["url"].as_s
+ if params.quality.ends_with? "k"
+ audio_streams.each do |fmt|
+ if fmt["bitrate"].as_i == params.quality.rchop("k").to_i
+ url = fmt["url"].as_s
+ end
end
end
else
@@ -185,10 +182,18 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
return env.redirect url
end
+ # Structure used for the download widget
+ video_assets = Invidious::Frontend::WatchPage::VideoAssets.new(
+ full_videos: fmt_stream,
+ video_streams: video_streams,
+ audio_streams: audio_streams,
+ captions: video.captions
+ )
+
templated "watch"
end
- def redirect(env)
+ def self.redirect(env)
url = "/watch?v=#{env.params.url["id"]}"
if env.params.query.size > 0
url += "&#{env.params.query}"
@@ -196,4 +201,135 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
return env.redirect url
end
+
+ def self.mark_watched(env)
+ locale = env.get("preferences").as(Preferences).locale
+
+ user = env.get? "user"
+ sid = env.get? "sid"
+ referer = get_referer(env, "/feed/subscriptions")
+
+ redirect = env.params.query["redirect"]?
+ redirect ||= "true"
+ redirect = redirect == "true"
+
+ if !user
+ if redirect
+ return env.redirect referer
+ else
+ return error_json(403, "No such user")
+ end
+ end
+
+ user = user.as(User)
+ sid = sid.as(String)
+ token = env.params.body["csrf_token"]?
+
+ id = env.params.query["id"]?
+ if !id
+ env.response.status_code = 400
+ return
+ end
+
+ begin
+ validate_request(token, sid, env.request, HMAC_KEY, locale)
+ rescue ex
+ if redirect
+ return error_template(400, ex)
+ else
+ return error_json(400, ex)
+ end
+ end
+
+ if env.params.query["action_mark_watched"]?
+ action = "action_mark_watched"
+ elsif env.params.query["action_mark_unwatched"]?
+ action = "action_mark_unwatched"
+ else
+ return env.redirect referer
+ end
+
+ case action
+ when "action_mark_watched"
+ Invidious::Database::Users.mark_watched(user, id)
+ when "action_mark_unwatched"
+ Invidious::Database::Users.mark_unwatched(user, id)
+ else
+ return error_json(400, "Unsupported action #{action}")
+ end
+
+ if redirect
+ env.redirect referer
+ else
+ env.response.content_type = "application/json"
+ "{}"
+ end
+ end
+
+ def self.clip(env)
+ clip_id = env.params.url["clip"]?
+
+ return error_template(400, "A clip ID is required") if !clip_id
+
+ response = YoutubeAPI.resolve_url("https://www.youtube.com/clip/#{clip_id}")
+ return error_template(400, "Invalid clip ID") if response["error"]?
+
+ if video_id = response.dig?("endpoint", "watchEndpoint", "videoId")
+ if params = response.dig?("endpoint", "watchEndpoint", "params").try &.as_s
+ start_time, end_time, _ = parse_clip_parameters(params)
+ env.params.query["start"] = start_time.to_s if start_time != nil
+ env.params.query["end"] = end_time.to_s if end_time != nil
+ end
+
+ return env.redirect "/watch?v=#{video_id}&#{env.params.query}"
+ else
+ return error_template(404, "The requested clip doesn't exist")
+ end
+ end
+
+ def self.download(env)
+ if CONFIG.disabled?("downloads")
+ return error_template(403, "Administrator has disabled this endpoint.")
+ end
+
+ title = env.params.body["title"]? || ""
+ video_id = env.params.body["id"]? || ""
+ selection = env.params.body["download_widget"]?
+
+ if title.empty? || video_id.empty? || selection.nil?
+ return error_template(400, "Missing form data")
+ end
+
+ download_widget = JSON.parse(selection)
+
+ extension = download_widget["ext"].as_s
+ filename = "#{title}-#{video_id}.#{extension}"
+
+ # Delete the now useless URL parameters
+ env.params.body.delete("id")
+ env.params.body.delete("title")
+ env.params.body.delete("download_widget")
+
+ # Pass form parameters as URL parameters for the handlers of both
+ # /latest_version and /api/v1/captions. This avoids an unnecessary
+ # redirect and duplicated (and hazardous) sanity checks.
+ if label = download_widget["label"]?
+ # URL params specific to /api/v1/captions/:id
+ env.params.url["id"] = video_id
+ env.params.query["title"] = filename
+ env.params.query["label"] = URI.decode_www_form(label.as_s)
+
+ return Invidious::Routes::API::V1::Videos.captions(env)
+ elsif itag = download_widget["itag"]?.try &.as_i
+ # URL params specific to /latest_version
+ env.params.query["id"] = video_id
+ env.params.query["itag"] = itag.to_s
+ env.params.query["title"] = filename
+ env.params.query["local"] = "true"
+
+ return Invidious::Routes::VideoPlayback.latest_version(env)
+ else
+ return error_template(400, "Invalid label or itag")
+ end
+ end
end
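
Note on the new `/download` handler above: the `download_widget` form field is parsed as a small JSON object whose keys drive the dispatch. A rough sketch of the two shapes it handles (the concrete label/itag values below are illustrative, not taken from the actual frontend):

  # Caption download -> dispatched to /api/v1/captions/:id (label value illustrative)
  download_widget = JSON.parse(%({"label": "English", "ext": "vtt"}))

  # Video/audio download -> dispatched to /latest_version (itag value illustrative)
  download_widget = JSON.parse(%({"itag": 18, "ext": "mp4"}))
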
diff --git a/src/invidious/routing.cr b/src/invidious/routing.cr
index 82d0028b..9009062f 100644
--- a/src/invidious/routing.cr
+++ b/src/invidious/routing.cr
@@ -1,15 +1,314 @@
module Invidious::Routing
- macro get(path, controller, method = :handle)
- get {{ path }} do |env|
- controller_instance = {{ controller }}.new
- controller_instance.{{ method.id }}(env)
+ extend self
+
+ {% for http_method in {"get", "post", "delete", "options", "patch", "put"} %}
+
+ macro {{http_method.id}}(path, controller, method = :handle)
+ unless Kemal::Utils.path_starts_with_slash?(\{{path}})
+ raise Kemal::Exceptions::InvalidPathStartException.new({{http_method}}, \{{path}})
+ end
+
+ Kemal::RouteHandler::INSTANCE.add_route({{http_method.upcase}}, \{{path}}) do |env|
+ \{{ controller }}.\{{ method.id }}(env)
+ end
end
+
+ {% end %}
+
+ def register_all
+ {% unless flag?(:api_only) %}
+ get "/", Routes::Misc, :home
+ get "/privacy", Routes::Misc, :privacy
+ get "/licenses", Routes::Misc, :licenses
+ get "/redirect", Routes::Misc, :cross_instance_redirect
+
+ self.register_channel_routes
+ self.register_watch_routes
+
+ self.register_iv_playlist_routes
+ self.register_yt_playlist_routes
+
+ self.register_search_routes
+
+ self.register_user_routes
+ self.register_feed_routes
+
+ # Support push notifications via PubSubHubbub
+ get "/feed/webhook/:token", Routes::Feeds, :push_notifications_get
+ post "/feed/webhook/:token", Routes::Feeds, :push_notifications_post
+
+ if CONFIG.enable_user_notifications
+ get "/modify_notifications", Routes::Notifications, :modify
+ end
+ {% end %}
+
+ self.register_image_routes
+ self.register_api_v1_routes
+ self.register_api_manifest_routes
+ self.register_video_playback_routes
end
- macro post(path, controller, method = :handle)
- post {{ path }} do |env|
- controller_instance = {{ controller }}.new
- controller_instance.{{ method.id }}(env)
- end
+ # -------------------
+ # Invidious routes
+ # -------------------
+
+ def register_user_routes
+ # User login/out
+ get "/login", Routes::Login, :login_page
+ post "/login", Routes::Login, :login
+ post "/signout", Routes::Login, :signout
+
+ # User preferences
+ get "/preferences", Routes::PreferencesRoute, :show
+ post "/preferences", Routes::PreferencesRoute, :update
+ get "/toggle_theme", Routes::PreferencesRoute, :toggle_theme
+ get "/data_control", Routes::PreferencesRoute, :data_control
+ post "/data_control", Routes::PreferencesRoute, :update_data_control
+
+ # User account management
+ get "/change_password", Routes::Account, :get_change_password
+ post "/change_password", Routes::Account, :post_change_password
+ get "/delete_account", Routes::Account, :get_delete
+ post "/delete_account", Routes::Account, :post_delete
+ get "/clear_watch_history", Routes::Account, :get_clear_history
+ post "/clear_watch_history", Routes::Account, :post_clear_history
+ get "/authorize_token", Routes::Account, :get_authorize_token
+ post "/authorize_token", Routes::Account, :post_authorize_token
+ get "/token_manager", Routes::Account, :token_manager
+ post "/token_ajax", Routes::Account, :token_ajax
+ post "/subscription_ajax", Routes::Subscriptions, :toggle_subscription
+ get "/subscription_manager", Routes::Subscriptions, :subscription_manager
+ end
+
+ def register_iv_playlist_routes
+ get "/create_playlist", Routes::Playlists, :new
+ post "/create_playlist", Routes::Playlists, :create
+ get "/subscribe_playlist", Routes::Playlists, :subscribe
+ get "/delete_playlist", Routes::Playlists, :delete_page
+ post "/delete_playlist", Routes::Playlists, :delete
+ get "/edit_playlist", Routes::Playlists, :edit
+ post "/edit_playlist", Routes::Playlists, :update
+ get "/add_playlist_items", Routes::Playlists, :add_playlist_items_page
+ post "/playlist_ajax", Routes::Playlists, :playlist_ajax
+ end
+
+ def register_feed_routes
+ # Feeds
+ get "/view_all_playlists", Routes::Feeds, :view_all_playlists_redirect
+ get "/feed/playlists", Routes::Feeds, :playlists
+ get "/feed/popular", Routes::Feeds, :popular
+ get "/feed/trending", Routes::Feeds, :trending
+ get "/feed/subscriptions", Routes::Feeds, :subscriptions
+ get "/feed/history", Routes::Feeds, :history
+
+ # RSS Feeds
+ get "/feed/channel/:ucid", Routes::Feeds, :rss_channel
+ get "/feed/private", Routes::Feeds, :rss_private
+ get "/feed/playlist/:plid", Routes::Feeds, :rss_playlist
+ get "/feeds/videos.xml", Routes::Feeds, :rss_videos
+ end
+
+ # -------------------
+ # Youtube routes
+ # -------------------
+
+ def register_channel_routes
+ get "/channel/:ucid", Routes::Channels, :home
+ get "/channel/:ucid/home", Routes::Channels, :home
+ get "/channel/:ucid/videos", Routes::Channels, :videos
+ get "/channel/:ucid/shorts", Routes::Channels, :shorts
+ get "/channel/:ucid/streams", Routes::Channels, :streams
+ get "/channel/:ucid/podcasts", Routes::Channels, :podcasts
+ get "/channel/:ucid/releases", Routes::Channels, :releases
+ get "/channel/:ucid/playlists", Routes::Channels, :playlists
+ get "/channel/:ucid/community", Routes::Channels, :community
+ get "/channel/:ucid/channels", Routes::Channels, :channels
+ get "/channel/:ucid/about", Routes::Channels, :about
+
+ get "/channel/:ucid/live", Routes::Channels, :live
+ get "/user/:user/live", Routes::Channels, :live
+ get "/c/:user/live", Routes::Channels, :live
+ get "/post/:id", Routes::Channels, :post
+
+ # Channel catch-all, to redirect future routes to the channel's home
+ # NOTE: defined last in order to be processed after the other routes
+ get "/channel/:ucid/*", Routes::Channels, :redirect_home
+
+ # /c/LinusTechTips
+ get "/c/:user", Routes::Channels, :brand_redirect
+ get "/c/:user/:tab", Routes::Channels, :brand_redirect
+
+ # /user/linustechtips (Not always the same as /c/)
+ get "/user/:user", Routes::Channels, :brand_redirect
+ get "/user/:user/:tab", Routes::Channels, :brand_redirect
+
+ # /@LinusTechTips (Handle)
+ get "/@:user", Routes::Channels, :brand_redirect
+ get "/@:user/:tab", Routes::Channels, :brand_redirect
+
+ # /attribution_link?a=anything&u=/channel/UCZYTClx2T1of7BRZ86-8fow
+ get "/attribution_link", Routes::Channels, :brand_redirect
+ get "/attribution_link/:tab", Routes::Channels, :brand_redirect
+
+ # /profile?user=linustechtips
+ get "/profile", Routes::Channels, :profile
+ get "/profile/*", Routes::Channels, :profile
+ end
+
+ def register_watch_routes
+ get "/watch", Routes::Watch, :handle
+ post "/watch_ajax", Routes::Watch, :mark_watched
+ get "/watch/:id", Routes::Watch, :redirect
+ get "/live/:id", Routes::Watch, :redirect
+ get "/shorts/:id", Routes::Watch, :redirect
+ get "/clip/:clip", Routes::Watch, :clip
+ get "/w/:id", Routes::Watch, :redirect
+ get "/v/:id", Routes::Watch, :redirect
+ get "/e/:id", Routes::Watch, :redirect
+
+ post "/download", Routes::Watch, :download
+
+ get "/embed/", Routes::Embed, :redirect
+ get "/embed/:id", Routes::Embed, :show
+ end
+
+ def register_yt_playlist_routes
+ get "/playlist", Routes::Playlists, :show
+ get "/mix", Routes::Playlists, :mix
+ get "/watch_videos", Routes::Playlists, :watch_videos
+ end
+
+ def register_search_routes
+ get "/opensearch.xml", Routes::Search, :opensearch
+ get "/results", Routes::Search, :results
+ get "/search", Routes::Search, :search
+ get "/hashtag/:hashtag", Routes::Search, :hashtag
+ end
+
+ # -------------------
+ # Media proxy routes
+ # -------------------
+
+ def register_api_manifest_routes
+ get "/api/manifest/dash/id/:id", Routes::API::Manifest, :get_dash_video_id
+
+ get "/api/manifest/dash/id/videoplayback", Routes::API::Manifest, :get_dash_video_playback
+ get "/api/manifest/dash/id/videoplayback/*", Routes::API::Manifest, :get_dash_video_playback_greedy
+
+ options "/api/manifest/dash/id/videoplayback", Routes::API::Manifest, :options_dash_video_playback
+ options "/api/manifest/dash/id/videoplayback/*", Routes::API::Manifest, :options_dash_video_playback
+
+ get "/api/manifest/hls_playlist/*", Routes::API::Manifest, :get_hls_playlist
+ get "/api/manifest/hls_variant/*", Routes::API::Manifest, :get_hls_variant
+ end
+
+ def register_video_playback_routes
+ get "/videoplayback", Routes::VideoPlayback, :get_video_playback
+ get "/videoplayback/*", Routes::VideoPlayback, :get_video_playback_greedy
+
+ options "/videoplayback", Routes::VideoPlayback, :options_video_playback
+ options "/videoplayback/*", Routes::VideoPlayback, :options_video_playback
+
+ get "/latest_version", Routes::VideoPlayback, :latest_version
+ end
+
+ def register_image_routes
+ get "/ggpht/*", Routes::Images, :ggpht
+ options "/sb/:authority/:id/:storyboard/:index", Routes::Images, :options_storyboard
+ get "/sb/:authority/:id/:storyboard/:index", Routes::Images, :get_storyboard
+ get "/s_p/:id/:name", Routes::Images, :s_p_image
+ get "/yts/img/:name", Routes::Images, :yts_image
+ get "/vi/:id/:name", Routes::Images, :thumbnails
+ end
+
+ # -------------------
+ # API routes
+ # -------------------
+
+ def register_api_v1_routes
+ {% begin %}
+ {{namespace = Routes::API::V1}}
+
+ # Videos
+ get "/api/v1/videos/:id", {{namespace}}::Videos, :videos
+ get "/api/v1/storyboards/:id", {{namespace}}::Videos, :storyboards
+ get "/api/v1/captions/:id", {{namespace}}::Videos, :captions
+ get "/api/v1/annotations/:id", {{namespace}}::Videos, :annotations
+ get "/api/v1/comments/:id", {{namespace}}::Videos, :comments
+ get "/api/v1/clips/:id", {{namespace}}::Videos, :clips
+
+ # Feeds
+ get "/api/v1/trending", {{namespace}}::Feeds, :trending
+ get "/api/v1/popular", {{namespace}}::Feeds, :popular
+
+ # Channels
+ get "/api/v1/channels/:ucid", {{namespace}}::Channels, :home
+ get "/api/v1/channels/:ucid/latest", {{namespace}}::Channels, :latest
+ get "/api/v1/channels/:ucid/videos", {{namespace}}::Channels, :videos
+ get "/api/v1/channels/:ucid/shorts", {{namespace}}::Channels, :shorts
+ get "/api/v1/channels/:ucid/streams", {{namespace}}::Channels, :streams
+ get "/api/v1/channels/:ucid/podcasts", {{namespace}}::Channels, :podcasts
+ get "/api/v1/channels/:ucid/releases", {{namespace}}::Channels, :releases
+ get "/api/v1/channels/:ucid/playlists", {{namespace}}::Channels, :playlists
+ get "/api/v1/channels/:ucid/community", {{namespace}}::Channels, :community
+ get "/api/v1/channels/:ucid/channels", {{namespace}}::Channels, :channels
+ get "/api/v1/channels/:ucid/search", {{namespace}}::Channels, :search
+
+ # Posts
+ get "/api/v1/post/:id", {{namespace}}::Channels, :post
+ get "/api/v1/post/:id/comments", {{namespace}}::Channels, :post_comments
+
+ # 301 redirects to new /api/v1/channels/community/:ucid and /:ucid/community
+ get "/api/v1/channels/comments/:ucid", {{namespace}}::Channels, :channel_comments_redirect
+ get "/api/v1/channels/:ucid/comments", {{namespace}}::Channels, :channel_comments_redirect
+
+ # Search
+ get "/api/v1/search", {{namespace}}::Search, :search
+ get "/api/v1/search/suggestions", {{namespace}}::Search, :search_suggestions
+ get "/api/v1/hashtag/:hashtag", {{namespace}}::Search, :hashtag
+
+
+ # Authenticated
+
+ get "/api/v1/auth/preferences", {{namespace}}::Authenticated, :get_preferences
+ post "/api/v1/auth/preferences", {{namespace}}::Authenticated, :set_preferences
+
+ get "/api/v1/auth/export/invidious", {{namespace}}::Authenticated, :export_invidious
+ post "/api/v1/auth/import/invidious", {{namespace}}::Authenticated, :import_invidious
+
+ get "/api/v1/auth/history", {{namespace}}::Authenticated, :get_history
+ post "/api/v1/auth/history/:id", {{namespace}}::Authenticated, :mark_watched
+ delete "/api/v1/auth/history/:id", {{namespace}}::Authenticated, :mark_unwatched
+ delete "/api/v1/auth/history", {{namespace}}::Authenticated, :clear_history
+
+ get "/api/v1/auth/feed", {{namespace}}::Authenticated, :feed
+
+ get "/api/v1/auth/subscriptions", {{namespace}}::Authenticated, :get_subscriptions
+ post "/api/v1/auth/subscriptions/:ucid", {{namespace}}::Authenticated, :subscribe_channel
+ delete "/api/v1/auth/subscriptions/:ucid", {{namespace}}::Authenticated, :unsubscribe_channel
+
+ get "/api/v1/auth/playlists", {{namespace}}::Authenticated, :list_playlists
+ post "/api/v1/auth/playlists", {{namespace}}::Authenticated, :create_playlist
+ patch "/api/v1/auth/playlists/:plid",{{namespace}}:: Authenticated, :update_playlist_attribute
+ delete "/api/v1/auth/playlists/:plid", {{namespace}}::Authenticated, :delete_playlist
+ post "/api/v1/auth/playlists/:plid/videos", {{namespace}}::Authenticated, :insert_video_into_playlist
+ delete "/api/v1/auth/playlists/:plid/videos/:index", {{namespace}}::Authenticated, :delete_video_in_playlist
+
+ get "/api/v1/auth/tokens", {{namespace}}::Authenticated, :get_tokens
+ post "/api/v1/auth/tokens/register", {{namespace}}::Authenticated, :register_token
+ post "/api/v1/auth/tokens/unregister", {{namespace}}::Authenticated, :unregister_token
+
+ if CONFIG.enable_user_notifications
+ get "/api/v1/auth/notifications", {{namespace}}::Authenticated, :notifications
+ post "/api/v1/auth/notifications", {{namespace}}::Authenticated, :notifications
+ end
+
+ # Misc
+ get "/api/v1/stats", {{namespace}}::Misc, :stats
+ get "/api/v1/playlists/:plid", {{namespace}}::Misc, :get_playlist
+ get "/api/v1/auth/playlists/:plid", {{namespace}}::Misc, :get_playlist
+ get "/api/v1/mixes/:rdid", {{namespace}}::Misc, :mixes
+ get "/api/v1/resolveurl", {{namespace}}::Misc, :resolve_url
+ {% end %}
end
end
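
For clarity, the route macros above now dispatch to class methods instead of instantiating a controller per request. A call such as `get "/watch", Routes::Watch, :handle` expands to roughly the following (the path-start check and its Kemal exception are omitted here for brevity):

  Kemal::RouteHandler::INSTANCE.add_route("GET", "/watch") do |env|
    Routes::Watch.handle(env)
  end
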
diff --git a/src/invidious/search.cr b/src/invidious/search.cr
deleted file mode 100644
index 4b216613..00000000
--- a/src/invidious/search.cr
+++ /dev/null
@@ -1,473 +0,0 @@
-struct SearchVideo
- include DB::Serializable
-
- property title : String
- property id : String
- property author : String
- property ucid : String
- property published : Time
- property views : Int64
- property description_html : String
- property length_seconds : Int32
- property live_now : Bool
- property paid : Bool
- property premium : Bool
- property premiere_timestamp : Time?
-
- def to_xml(auto_generated, query_params, xml : XML::Builder)
- query_params["v"] = self.id
-
- xml.element("entry") do
- xml.element("id") { xml.text "yt:video:#{self.id}" }
- xml.element("yt:videoId") { xml.text self.id }
- xml.element("yt:channelId") { xml.text self.ucid }
- xml.element("title") { xml.text self.title }
- xml.element("link", rel: "alternate", href: "#{HOST_URL}/watch?#{query_params}")
-
- xml.element("author") do
- if auto_generated
- xml.element("name") { xml.text self.author }
- xml.element("uri") { xml.text "#{HOST_URL}/channel/#{self.ucid}" }
- else
- xml.element("name") { xml.text author }
- xml.element("uri") { xml.text "#{HOST_URL}/channel/#{ucid}" }
- end
- end
-
- xml.element("content", type: "xhtml") do
- xml.element("div", xmlns: "http://www.w3.org/1999/xhtml") do
- xml.element("a", href: "#{HOST_URL}/watch?#{query_params}") do
- xml.element("img", src: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg")
- end
-
- xml.element("p", style: "word-break:break-word;white-space:pre-wrap") { xml.text html_to_content(self.description_html) }
- end
- end
-
- xml.element("published") { xml.text self.published.to_s("%Y-%m-%dT%H:%M:%S%:z") }
-
- xml.element("media:group") do
- xml.element("media:title") { xml.text self.title }
- xml.element("media:thumbnail", url: "#{HOST_URL}/vi/#{self.id}/mqdefault.jpg",
- width: "320", height: "180")
- xml.element("media:description") { xml.text html_to_content(self.description_html) }
- end
-
- xml.element("media:community") do
- xml.element("media:statistics", views: self.views)
- end
- end
- end
-
- def to_xml(auto_generated, query_params, xml : XML::Builder | Nil = nil)
- if xml
- to_xml(HOST_URL, auto_generated, query_params, xml)
- else
- XML.build do |json|
- to_xml(HOST_URL, auto_generated, query_params, xml)
- end
- end
- end
-
- def to_json(locale, json : JSON::Builder)
- json.object do
- json.field "type", "video"
- json.field "title", self.title
- json.field "videoId", self.id
-
- json.field "author", self.author
- json.field "authorId", self.ucid
- json.field "authorUrl", "/channel/#{self.ucid}"
-
- json.field "videoThumbnails" do
- generate_thumbnails(json, self.id)
- end
-
- json.field "description", html_to_content(self.description_html)
- json.field "descriptionHtml", self.description_html
-
- json.field "viewCount", self.views
- json.field "published", self.published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
- json.field "lengthSeconds", self.length_seconds
- json.field "liveNow", self.live_now
- json.field "paid", self.paid
- json.field "premium", self.premium
- json.field "isUpcoming", self.is_upcoming
-
- if self.premiere_timestamp
- json.field "premiereTimestamp", self.premiere_timestamp.try &.to_unix
- end
- end
- end
-
- def to_json(locale, json : JSON::Builder | Nil = nil)
- if json
- to_json(locale, json)
- else
- JSON.build do |json|
- to_json(locale, json)
- end
- end
- end
-
- def is_upcoming
- premiere_timestamp ? true : false
- end
-end
-
-struct SearchPlaylistVideo
- include DB::Serializable
-
- property title : String
- property id : String
- property length_seconds : Int32
-end
-
-struct SearchPlaylist
- include DB::Serializable
-
- property title : String
- property id : String
- property author : String
- property ucid : String
- property video_count : Int32
- property videos : Array(SearchPlaylistVideo)
- property thumbnail : String?
-
- def to_json(locale, json : JSON::Builder)
- json.object do
- json.field "type", "playlist"
- json.field "title", self.title
- json.field "playlistId", self.id
- json.field "playlistThumbnail", self.thumbnail
-
- json.field "author", self.author
- json.field "authorId", self.ucid
- json.field "authorUrl", "/channel/#{self.ucid}"
-
- json.field "videoCount", self.video_count
- json.field "videos" do
- json.array do
- self.videos.each do |video|
- json.object do
- json.field "title", video.title
- json.field "videoId", video.id
- json.field "lengthSeconds", video.length_seconds
-
- json.field "videoThumbnails" do
- generate_thumbnails(json, video.id)
- end
- end
- end
- end
- end
- end
- end
-
- def to_json(locale, json : JSON::Builder | Nil = nil)
- if json
- to_json(locale, json)
- else
- JSON.build do |json|
- to_json(locale, json)
- end
- end
- end
-end
-
-struct SearchChannel
- include DB::Serializable
-
- property author : String
- property ucid : String
- property author_thumbnail : String
- property subscriber_count : Int32
- property video_count : Int32
- property description_html : String
- property auto_generated : Bool
-
- def to_json(locale, json : JSON::Builder)
- json.object do
- json.field "type", "channel"
- json.field "author", self.author
- json.field "authorId", self.ucid
- json.field "authorUrl", "/channel/#{self.ucid}"
-
- json.field "authorThumbnails" do
- json.array do
- qualities = {32, 48, 76, 100, 176, 512}
-
- qualities.each do |quality|
- json.object do
- json.field "url", self.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
- json.field "width", quality
- json.field "height", quality
- end
- end
- end
- end
-
- json.field "autoGenerated", self.auto_generated
- json.field "subCount", self.subscriber_count
- json.field "videoCount", self.video_count
-
- json.field "description", html_to_content(self.description_html)
- json.field "descriptionHtml", self.description_html
- end
- end
-
- def to_json(locale, json : JSON::Builder | Nil = nil)
- if json
- to_json(locale, json)
- else
- JSON.build do |json|
- to_json(locale, json)
- end
- end
- end
-end
-
-alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist
-
-def channel_search(query, page, channel)
- response = YT_POOL.client &.get("/channel/#{channel}")
-
- if response.status_code == 404
- response = YT_POOL.client &.get("/user/#{channel}")
- response = YT_POOL.client &.get("/c/#{channel}") if response.status_code == 404
- initial_data = extract_initial_data(response.body)
- ucid = initial_data["header"]["c4TabbedHeaderRenderer"]?.try &.["channelId"].as_s?
- raise InfoException.new("Impossible to extract channel ID from page") if !ucid
- else
- ucid = channel
- end
-
- continuation = produce_channel_search_continuation(ucid, query, page)
- response_json = request_youtube_api_browse(continuation)
-
- result = JSON.parse(response_json)
- continuationItems = result["onResponseReceivedActions"]?
- .try &.[0]["appendContinuationItemsAction"]["continuationItems"]
-
- return 0, [] of SearchItem if !continuationItems
-
- items = [] of SearchItem
- continuationItems.as_a.select(&.as_h.has_key?("itemSectionRenderer")).each { |item|
- extract_item(item["itemSectionRenderer"]["contents"].as_a[0])
- .try { |t| items << t }
- }
-
- return items.size, items
-end
-
-def search(query, search_params = produce_search_params(content_type: "all"), region = nil)
- return 0, [] of SearchItem if query.empty?
-
- body = YT_POOL.client(region, &.get("/results?search_query=#{URI.encode_www_form(query)}&sp=#{search_params}&hl=en").body)
- return 0, [] of SearchItem if body.empty?
-
- initial_data = extract_initial_data(body)
- items = extract_items(initial_data)
-
- # initial_data["estimatedResults"]?.try &.as_s.to_i64
-
- return items.size, items
-end
-
-def produce_search_params(page = 1, sort : String = "relevance", date : String = "", content_type : String = "",
- duration : String = "", features : Array(String) = [] of String)
- object = {
- "1:varint" => 0_i64,
- "2:embedded" => {} of String => Int64,
- "9:varint" => ((page - 1) * 20).to_i64,
- }
-
- case sort
- when "relevance"
- object["1:varint"] = 0_i64
- when "rating"
- object["1:varint"] = 1_i64
- when "upload_date", "date"
- object["1:varint"] = 2_i64
- when "view_count", "views"
- object["1:varint"] = 3_i64
- else
- raise "No sort #{sort}"
- end
-
- case date
- when "hour"
- object["2:embedded"].as(Hash)["1:varint"] = 1_i64
- when "today"
- object["2:embedded"].as(Hash)["1:varint"] = 2_i64
- when "week"
- object["2:embedded"].as(Hash)["1:varint"] = 3_i64
- when "month"
- object["2:embedded"].as(Hash)["1:varint"] = 4_i64
- when "year"
- object["2:embedded"].as(Hash)["1:varint"] = 5_i64
- else nil # Ignore
- end
-
- case content_type
- when "video"
- object["2:embedded"].as(Hash)["2:varint"] = 1_i64
- when "channel"
- object["2:embedded"].as(Hash)["2:varint"] = 2_i64
- when "playlist"
- object["2:embedded"].as(Hash)["2:varint"] = 3_i64
- when "movie"
- object["2:embedded"].as(Hash)["2:varint"] = 4_i64
- when "show"
- object["2:embedded"].as(Hash)["2:varint"] = 5_i64
- when "all"
- #
- else
- object["2:embedded"].as(Hash)["2:varint"] = 1_i64
- end
-
- case duration
- when "short"
- object["2:embedded"].as(Hash)["3:varint"] = 1_i64
- when "long"
- object["2:embedded"].as(Hash)["3:varint"] = 2_i64
- else nil # Ignore
- end
-
- features.each do |feature|
- case feature
- when "hd"
- object["2:embedded"].as(Hash)["4:varint"] = 1_i64
- when "subtitles"
- object["2:embedded"].as(Hash)["5:varint"] = 1_i64
- when "creative_commons", "cc"
- object["2:embedded"].as(Hash)["6:varint"] = 1_i64
- when "3d"
- object["2:embedded"].as(Hash)["7:varint"] = 1_i64
- when "live", "livestream"
- object["2:embedded"].as(Hash)["8:varint"] = 1_i64
- when "purchased"
- object["2:embedded"].as(Hash)["9:varint"] = 1_i64
- when "4k"
- object["2:embedded"].as(Hash)["14:varint"] = 1_i64
- when "360"
- object["2:embedded"].as(Hash)["15:varint"] = 1_i64
- when "location"
- object["2:embedded"].as(Hash)["23:varint"] = 1_i64
- when "hdr"
- object["2:embedded"].as(Hash)["25:varint"] = 1_i64
- else nil # Ignore
- end
- end
-
- if object["2:embedded"].as(Hash).empty?
- object.delete("2:embedded")
- end
-
- params = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return params
-end
-
-def produce_channel_search_continuation(ucid, query, page)
- if page <= 1
- idx = 0_i64
- else
- idx = 30_i64 * (page - 1)
- end
-
- object = {
- "80226972:embedded" => {
- "2:string" => ucid,
- "3:base64" => {
- "2:string" => "search",
- "6:varint" => 1_i64,
- "7:varint" => 1_i64,
- "12:varint" => 1_i64,
- "15:base64" => {
- "3:varint" => idx,
- },
- "23:varint" => 0_i64,
- },
- "11:string" => query,
- "35:string" => "browse-feed#{ucid}search",
- },
- }
-
- continuation = object.try { |i| Protodec::Any.cast_json(object) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return continuation
-end
-
-def process_search_query(query, page, user, region)
- if user
- user = user.as(User)
- view_name = "subscriptions_#{sha256(user.email)}"
- end
-
- channel = nil
- content_type = "all"
- date = ""
- duration = ""
- features = [] of String
- sort = "relevance"
- subscriptions = nil
-
- operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
- operators.each do |operator|
- key, value = operator.downcase.split(":")
-
- case key
- when "channel", "user"
- channel = operator.split(":")[-1]
- when "content_type", "type"
- content_type = value
- when "date"
- date = value
- when "duration"
- duration = value
- when "feature", "features"
- features = value.split(",")
- when "sort"
- sort = value
- when "subscriptions"
- subscriptions = value == "true"
- else
- operators.delete(operator)
- end
- end
-
- search_query = (query.split(" ") - operators).join(" ")
-
- if channel
- count, items = channel_search(search_query, page, channel)
- elsif subscriptions
- if view_name
- items = PG_DB.query_all("SELECT id,title,published,updated,ucid,author,length_seconds FROM (
- SELECT *,
- to_tsvector(#{view_name}.title) ||
- to_tsvector(#{view_name}.author)
- as document
- FROM #{view_name}
- ) v_search WHERE v_search.document @@ plainto_tsquery($1) LIMIT 20 OFFSET $2;", search_query, (page - 1) * 20, as: ChannelVideo)
- count = items.size
- else
- items = [] of ChannelVideo
- count = 0
- end
- else
- search_params = produce_search_params(page: page, sort: sort, date: date, content_type: content_type,
- duration: duration, features: features)
-
- count, items = search(search_query, search_params, region).as(Tuple)
- end
-
- {search_query, count, items, operators}
-end
diff --git a/src/invidious/search/ctoken.cr b/src/invidious/search/ctoken.cr
new file mode 100644
index 00000000..161065e0
--- /dev/null
+++ b/src/invidious/search/ctoken.cr
@@ -0,0 +1,32 @@
+def produce_channel_search_continuation(ucid, query, page)
+ if page <= 1
+ idx = 0_i64
+ else
+ idx = 30_i64 * (page - 1)
+ end
+
+ object = {
+ "80226972:embedded" => {
+ "2:string" => ucid,
+ "3:base64" => {
+ "2:string" => "search",
+ "6:varint" => 1_i64,
+ "7:varint" => 1_i64,
+ "12:varint" => 1_i64,
+ "15:base64" => {
+ "3:varint" => idx,
+ },
+ "23:varint" => 0_i64,
+ },
+ "11:string" => query,
+ "35:string" => "browse-feed#{ucid}search",
+ },
+ }
+
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return continuation
+end
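
A minimal usage sketch for this extracted helper, assuming `ucid` already holds a channel ID and mirroring how `Invidious::Search::Processors.channel` consumes the token further down in this diff:

  continuation = produce_channel_search_continuation(ucid, "some query", 2)
  response_json = YoutubeAPI.browse(continuation)
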
diff --git a/src/invidious/search/filters.cr b/src/invidious/search/filters.cr
new file mode 100644
index 00000000..bf968734
--- /dev/null
+++ b/src/invidious/search/filters.cr
@@ -0,0 +1,376 @@
+require "protodec/utils"
+require "http/params"
+
+module Invidious::Search
+ struct Filters
+ # Values correspond to { "2:embedded": { "1:varint": <X> }}
+ # except for "None" which is only used by us (= nothing selected)
+ enum Date
+ None = 0
+ Hour = 1
+ Today = 2
+ Week = 3
+ Month = 4
+ Year = 5
+ end
+
+ # Values correspond to { "2:embedded": { "2:varint": <X> }}
+ # except for "All" which is only used by us (= nothing selected)
+ enum Type
+ All = 0
+ Video = 1
+ Channel = 2
+ Playlist = 3
+ Movie = 4
+
+ # Has it been removed?
+ # (Not available on youtube's UI)
+ Show = 5
+ end
+
+ # Values correspond to { "2:embedded": { "3:varint": <X> }}
+ # except for "None" which is only used by us (= nothing selected)
+ enum Duration
+ None = 0
+ Short = 1 # "Under 4 minutes"
+ Long = 2 # "Over 20 minutes"
+ Medium = 3 # "4 - 20 minutes"
+ end
+
+ # Note: flag enums automatically generate
+ # "none" and "all" members
+ @[Flags]
+ enum Features
+ Live
+ FourK # "4K"
+ HD
+ Subtitles # "Subtitles/CC"
+ CCommons # "Creative Commons"
+ ThreeSixty # "360°"
+ VR180
+ ThreeD # "3D"
+ HDR
+ Location
+ Purchased
+ end
+
+ # Values correspond to { "1:varint": <X> }
+ enum Sort
+ Relevance = 0
+ Rating = 1
+ Date = 2
+ Views = 3
+ end
+
+ # Parameters are sorted as on Youtube
+ property date : Date
+ property type : Type
+ property duration : Duration
+ property features : Features
+ property sort : Sort
+
+ def initialize(
+ *, # All parameters must be named
+ @date : Date = Date::None,
+ @type : Type = Type::All,
+ @duration : Duration = Duration::None,
+ @features : Features = Features::None,
+ @sort : Sort = Sort::Relevance
+ )
+ end
+
+ def default? : Bool
+ return @date.none? && @type.all? && @duration.none? && \
+ @features.none? && @sort.relevance?
+ end
+
+ # -------------------
+ # Invidious params
+ # -------------------
+
+ def self.parse_features(raw : Array(String)) : Features
+ # Initialize return variable
+ features = Features.new(0)
+
+ raw.each do |ft|
+ case ft.downcase
+ when "live", "livestream"
+ features = features | Features::Live
+ when "4k" then features = features | Features::FourK
+ when "hd" then features = features | Features::HD
+ when "subtitles" then features = features | Features::Subtitles
+ when "creative_commons", "commons", "cc"
+ features = features | Features::CCommons
+ when "360" then features = features | Features::ThreeSixty
+ when "vr180" then features = features | Features::VR180
+ when "3d" then features = features | Features::ThreeD
+ when "hdr" then features = features | Features::HDR
+ when "location" then features = features | Features::Location
+ when "purchased" then features = features | Features::Purchased
+ end
+ end
+
+ return features
+ end
+
+ def self.format_features(features : Features) : String
+ # Directly return an empty string if there are no features
+ return "" if features.none?
+
+ # Initialize return variable
+ str = [] of String
+
+ str << "live" if features.live?
+ str << "4k" if features.four_k?
+ str << "hd" if features.hd?
+ str << "subtitles" if features.subtitles?
+ str << "commons" if features.c_commons?
+ str << "360" if features.three_sixty?
+ str << "vr180" if features.vr180?
+ str << "3d" if features.three_d?
+ str << "hdr" if features.hdr?
+ str << "location" if features.location?
+ str << "purchased" if features.purchased?
+
+ return str.join(',')
+ end
+
+ def self.from_legacy_filters(str : String) : {Filters, String, String, Bool}
+ # Split search query on spaces
+ members = str.split(' ')
+
+ # Output variables
+ channel = ""
+ filters = Filters.new
+ subscriptions = false
+
+ # Array to hold the non-filter members
+ query = [] of String
+
+ # Parse!
+ members.each do |substr|
+ # Split the operator name from its value
+ operators = substr.split(':')
+
+ case operators[0]
+ when "user", "channel"
+ next if operators.size != 2
+ channel = operators[1]
+ #
+ when "type", "content_type"
+ next if operators.size != 2
+ type = Type.parse?(operators[1])
+ filters.type = type if !type.nil?
+ #
+ when "date"
+ next if operators.size != 2
+ date = Date.parse?(operators[1])
+ filters.date = date if !date.nil?
+ #
+ when "duration"
+ next if operators.size != 2
+ duration = Duration.parse?(operators[1])
+ filters.duration = duration if !duration.nil?
+ #
+ when "feature", "features"
+ next if operators.size != 2
+ features = parse_features(operators[1].split(','))
+ filters.features = features if !features.nil?
+ #
+ when "sort"
+ next if operators.size != 2
+ sort = Sort.parse?(operators[1])
+ filters.sort = sort if !sort.nil?
+ #
+ when "subscriptions"
+ next if operators.size != 2
+ subscriptions = {"true", "on", "yes", "1"}.any?(&.== operators[1])
+ #
+ else
+ query << substr
+ end
+ end
+
+ # Re-assemble query (without filters)
+ cleaned_query = query.join(' ')
+
+ return {filters, channel, cleaned_query, subscriptions}
+ end
+
+ def self.from_iv_params(params : HTTP::Params) : Filters
+ # Temporary variables
+ filters = Filters.new
+
+ if type = params["type"]?
+ filters.type = Type.parse?(type) || Type::All
+ params.delete("type")
+ end
+
+ if date = params["date"]?
+ filters.date = Date.parse?(date) || Date::None
+ params.delete("date")
+ end
+
+ if duration = params["duration"]?
+ filters.duration = Duration.parse?(duration) || Duration::None
+ params.delete("duration")
+ end
+
+ features = params.fetch_all("features")
+ if !features.empty?
+ # Un-array input so it can be treated as a comma-separated list
+ features = features[0].split(',') if features.size == 1
+
+ filters.features = parse_features(features) || Features::None
+ params.delete_all("features")
+ end
+
+ if sort = params["sort"]?
+ filters.sort = Sort.parse?(sort) || Sort::Relevance
+ params.delete("sort")
+ end
+
+ return filters
+ end
+
+ def to_iv_params : HTTP::Params
+ # Temporary variables
+ raw_params = {} of String => Array(String)
+
+ raw_params["date"] = [@date.to_s.underscore] if !@date.none?
+ raw_params["type"] = [@type.to_s.underscore] if !@type.all?
+ raw_params["sort"] = [@sort.to_s.underscore] if !@sort.relevance?
+
+ if !@duration.none?
+ raw_params["duration"] = [@duration.to_s.underscore]
+ end
+
+ if !@features.none?
+ raw_params["features"] = [Filters.format_features(@features)]
+ end
+
+ return HTTP::Params.new(raw_params)
+ end
+
+ # -------------------
+ # Youtube params
+ # -------------------
+
+ # Produce the youtube search parameters for the
+ # innertube API (base64-encoded protobuf object).
+ def to_yt_params(page : Int = 1) : String
+ # Initialize the embedded protobuf object
+ embedded = {} of String => Int64
+
+ # Add these fields only if the associated parameter is selected
+ embedded["1:varint"] = @date.to_i64 if !@date.none?
+ embedded["2:varint"] = @type.to_i64 if !@type.all?
+ embedded["3:varint"] = @duration.to_i64 if !@duration.none?
+
+ if !@features.none?
+ # All features have a value of "1" when enabled, and
+ # the field is omitted when the feature is not selected.
+ embedded["4:varint"] = 1_i64 if @features.includes?(Features::HD)
+ embedded["5:varint"] = 1_i64 if @features.includes?(Features::Subtitles)
+ embedded["6:varint"] = 1_i64 if @features.includes?(Features::CCommons)
+ embedded["7:varint"] = 1_i64 if @features.includes?(Features::ThreeD)
+ embedded["8:varint"] = 1_i64 if @features.includes?(Features::Live)
+ embedded["9:varint"] = 1_i64 if @features.includes?(Features::Purchased)
+ embedded["14:varint"] = 1_i64 if @features.includes?(Features::FourK)
+ embedded["15:varint"] = 1_i64 if @features.includes?(Features::ThreeSixty)
+ embedded["23:varint"] = 1_i64 if @features.includes?(Features::Location)
+ embedded["25:varint"] = 1_i64 if @features.includes?(Features::HDR)
+ embedded["26:varint"] = 1_i64 if @features.includes?(Features::VR180)
+ end
+
+ # Initialize an empty protobuf object
+ object = {} of String => (Int64 | String | Hash(String, Int64))
+
+ # As usual, everything can be omitted if it has no value
+ object["2:embedded"] = embedded if !embedded.empty?
+
+ # Default sort is "relevance", so when this option is selected,
+ # the associated field can be omitted.
+ if !@sort.relevance?
+ object["1:varint"] = @sort.to_i64
+ end
+
+ # Add page number (if provided)
+ if page > 1
+ object["9:varint"] = ((page - 1) * 20).to_i64
+ end
+
+ # Prevent censoring of self-harm topics
+ # See https://github.com/iv-org/invidious/issues/4398
+ object["30:varint"] = 1.to_i64
+
+ return object
+ .try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+ end
+
+ # Function to parse the `sp` URL parameter from Youtube
+ # search page. It's a base64-encoded protobuf object.
+ def self.from_yt_params(params : HTTP::Params) : Filters
+ # Initialize output variable
+ filters = Filters.new
+
+ # Get the parameter and check for emptiness
+ search_params = params["sp"]?
+
+ if search_params.nil? || search_params.empty?
+ return filters
+ end
+
+ # Decode protobuf object
+ object = search_params
+ .try { |i| URI.decode_www_form(i) }
+ .try { |i| Base64.decode(i) }
+ .try { |i| IO::Memory.new(i) }
+ .try { |i| Protodec::Any.parse(i) }
+
+ # Parse items from embedded object
+ if embedded = object["2:0:embedded"]?
+ # All the following fields (date, type, duration) are optional.
+ if date = embedded["1:0:varint"]?
+ filters.date = Date.from_value?(date.as_i) || Date::None
+ end
+
+ if type = embedded["2:0:varint"]?
+ filters.type = Type.from_value?(type.as_i) || Type::All
+ end
+
+ if duration = embedded["3:0:varint"]?
+ filters.duration = Duration.from_value?(duration.as_i) || Duration::None
+ end
+
+ # All features should have a value of "1" when enabled, and
+ # the field should be omitted when the feature is not selected.
+ features = 0
+ features += (embedded["4:0:varint"]?.try &.as_i == 1_i64) ? Features::HD.value : 0
+ features += (embedded["5:0:varint"]?.try &.as_i == 1_i64) ? Features::Subtitles.value : 0
+ features += (embedded["6:0:varint"]?.try &.as_i == 1_i64) ? Features::CCommons.value : 0
+ features += (embedded["7:0:varint"]?.try &.as_i == 1_i64) ? Features::ThreeD.value : 0
+ features += (embedded["8:0:varint"]?.try &.as_i == 1_i64) ? Features::Live.value : 0
+ features += (embedded["9:0:varint"]?.try &.as_i == 1_i64) ? Features::Purchased.value : 0
+ features += (embedded["14:0:varint"]?.try &.as_i == 1_i64) ? Features::FourK.value : 0
+ features += (embedded["15:0:varint"]?.try &.as_i == 1_i64) ? Features::ThreeSixty.value : 0
+ features += (embedded["23:0:varint"]?.try &.as_i == 1_i64) ? Features::Location.value : 0
+ features += (embedded["25:0:varint"]?.try &.as_i == 1_i64) ? Features::HDR.value : 0
+ features += (embedded["26:0:varint"]?.try &.as_i == 1_i64) ? Features::VR180.value : 0
+
+ filters.features = Features.from_value?(features) || Features::None
+ end
+
+ if sort = object["1:0:varint"]?
+ filters.sort = Sort.from_value?(sort.as_i) || Sort::Relevance
+ end
+
+ # Remove URL parameter and return result
+ params.delete("sp")
+ return filters
+ end
+ end
+end
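
As a quick illustration of the round-trip between the two parameter formats handled by this struct (the enum values are chosen arbitrarily for the example):

  filters = Invidious::Search::Filters.new(
    date: Invidious::Search::Filters::Date::Today,
    type: Invidious::Search::Filters::Type::Video,
  )
  filters.to_iv_params.to_s     # => "date=today&type=video"
  filters.to_yt_params(page: 1) # URL-safe base64 protobuf, passed as `sp` to the innertube API
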
diff --git a/src/invidious/search/processors.cr b/src/invidious/search/processors.cr
new file mode 100644
index 00000000..25edb936
--- /dev/null
+++ b/src/invidious/search/processors.cr
@@ -0,0 +1,56 @@
+module Invidious::Search
+ module Processors
+ extend self
+
+ # Regular search (`/search` endpoint)
+ def regular(query : Query) : Array(SearchItem)
+ search_params = query.filters.to_yt_params(page: query.page)
+
+ client_config = YoutubeAPI::ClientConfig.new(region: query.region)
+ initial_data = YoutubeAPI.search(query.text, search_params, client_config: client_config)
+
+ items, _ = extract_items(initial_data)
+ return items.reject!(Category)
+ end
+
+ # Search a youtube channel
+ # TODO: clean code, and rely more on YoutubeAPI
+ def channel(query : Query) : Array(SearchItem)
+ response = YT_POOL.client &.get("/channel/#{query.channel}")
+
+ if response.status_code == 404
+ response = YT_POOL.client &.get("/user/#{query.channel}")
+ response = YT_POOL.client &.get("/c/#{query.channel}") if response.status_code == 404
+ initial_data = extract_initial_data(response.body)
+ ucid = initial_data.dig?("header", "c4TabbedHeaderRenderer", "channelId").try(&.as_s?)
+ raise ChannelSearchException.new(query.channel) if !ucid
+ else
+ ucid = query.channel
+ end
+
+ continuation = produce_channel_search_continuation(ucid, query.text, query.page)
+ response_json = YoutubeAPI.browse(continuation)
+
+ items, _ = extract_items(response_json, "", ucid)
+ return items.reject!(Category)
+ end
+
+ # Search inside of user subscriptions
+ def subscriptions(query : Query, user : Invidious::User) : Array(ChannelVideo)
+ view_name = "subscriptions_#{sha256(user.email)}"
+
+ return PG_DB.query_all("
+ SELECT id,title,published,updated,ucid,author,length_seconds
+ FROM (
+ SELECT *,
+ to_tsvector(#{view_name}.title) ||
+ to_tsvector(#{view_name}.author)
+ as document
+ FROM #{view_name}
+ ) v_search WHERE v_search.document @@ plainto_tsquery($1) LIMIT 20 OFFSET $2;",
+ query.text, (query.page - 1) * 20,
+ as: ChannelVideo
+ )
+ end
+ end
+end
diff --git a/src/invidious/search/query.cr b/src/invidious/search/query.cr
new file mode 100644
index 00000000..c8e8cf7f
--- /dev/null
+++ b/src/invidious/search/query.cr
@@ -0,0 +1,168 @@
+module Invidious::Search
+ class Query
+ enum Type
+ # Types related to YouTube
+ Regular # Youtube search page
+ Channel # Youtube channel search box
+
+ # Types specific to Invidious
+ Subscriptions # Search user subscriptions
+ Playlist # "Add playlist item" search
+ end
+
+ getter type : Type = Type::Regular
+
+ @raw_query : String
+ @query : String = ""
+
+ property filters : Filters = Filters.new
+ property page : Int32
+ property region : String?
+ property channel : String = ""
+
+ # Flag that indicates if the smart search features have been disabled.
+ @inhibit_ssf : Bool = false
+
+ # Return true if @raw_query is empty
+ private def empty_raw_query?
+ return @raw_query.empty?
+ end
+
+ # Same as `empty_raw_query?`, but named for external use
+ def empty?
+ return self.empty_raw_query?
+ end
+
+ # Getter for the query string.
+ # It is named `text` to reduce confusion (`search_query.text` makes more
+ # sense than `search_query.query`)
+ def text
+ return @query
+ end
+
+ # Initialize a new search query.
+ # Parameters are used to get the query string, the page number
+ # and the search filters (if any). Type tells this function
+ # where it is being called from (See `Type` above).
+ def initialize(
+ params : HTTP::Params,
+ @type : Type = Type::Regular,
+ @region : String? = nil
+ )
+ # Get the raw search query string (common to all search types). In
+ # Regular search mode, also look for the `search_query` URL parameter
+ _raw_query = params["q"]?
+ _raw_query ||= params["search_query"]? if @type.regular?
+ _raw_query ||= ""
+
+ # Remove surrounding whitespaces. Mostly useful for copy/pasted URLs.
+ @raw_query = _raw_query.strip
+
+ # Check for smart features (ex: URL search) inhibitor (backslash).
+ # If inhibitor is present, remove it.
+ if @raw_query.starts_with?('\\')
+ @inhibit_ssf = true
+ @raw_query = @raw_query[1..]
+ end
+
+ # Get the page number (also common to all search types)
+ @page = params["page"]?.try &.to_i? || 1
+
+ # Stop here if raw query is empty
+ # NOTE: maybe raise in the future?
+ return if self.empty_raw_query?
+
+ # Specific handling
+ case @type
+ when .channel?
+ # In "channel search" mode, filters are ignored, but we still parse
+ # the query to prevent transmission of legacy filters to youtube.
+ #
+ _, _, @query, _ = Filters.from_legacy_filters(@raw_query)
+ #
+ when .playlist?
+ # In "add playlist item" mode, filters are parsed from the query
+ # string itself (legacy), and the channel is ignored.
+ #
+ @filters, _, @query, _ = Filters.from_legacy_filters(@raw_query)
+ #
+ when .subscriptions?, .regular?
+ if params["sp"]?
+ # Parse the `sp` URL parameter (youtube compatibility)
+ @filters = Filters.from_yt_params(params)
+ @query = @raw_query || ""
+ else
+ # Parse invidious URL parameters (sort, date, etc...)
+ @filters = Filters.from_iv_params(params)
+ @channel = params["channel"]? || ""
+
+ if @filters.default? && @raw_query.index(/\w:\w/)
+ # Parse legacy filters from query
+ @filters, @channel, @query, subs = Filters.from_legacy_filters(@raw_query)
+ else
+ @query = @raw_query || ""
+ end
+
+ if !@channel.empty?
+ # Switch to channel search mode (filters will be ignored)
+ @type = Type::Channel
+ elsif subs
+ # Switch to subscriptions search mode
+ @type = Type::Subscriptions
+ end
+ end
+ end
+ end
+
+ # Run the search query using the corresponding search processor.
+ # Returns either the results or an empty array of `SearchItem`.
+ def process(user : Invidious::User? = nil) : Array(SearchItem) | Array(ChannelVideo)
+ items = [] of SearchItem
+
+ # Don't bother going further if search query is empty
+ return items if self.empty_raw_query?
+
+ case @type
+ when .regular?, .playlist?
+ items = Processors.regular(self)
+ #
+ when .channel?
+ items = Processors.channel(self)
+ #
+ when .subscriptions?
+ if user
+ items = Processors.subscriptions(self, user.as(Invidious::User))
+ end
+ end
+
+ return items
+ end
+
+ # Return the HTTP::Params corresponding to this Query (invidious format)
+ def to_http_params : HTTP::Params
+ params = @filters.to_iv_params
+
+ params["q"] = @query
+ params["channel"] = @channel if !@channel.empty?
+
+ return params
+ end
+
+ # Checks if the query is a standalone URL
+ def url? : Bool
+ # If the smart features have been inhibited, don't go further.
+ return false if @inhibit_ssf
+
+ # Only supported in regular search mode
+ return false if !@type.regular?
+
+ # If filters are present, that's a regular search
+ return false if !@filters.default?
+
+ # Simple heuristic: the query starts with a youtube domain name
+ return @raw_query.starts_with?(
+ /(https?:\/\/)?(www\.)?(m\.)?youtu(\.be|be\.com)\//
+ )
+ end
+ end
+end
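
A sketch of how a search route might drive the new class (names such as `env` and `user` are assumed to come from the surrounding HTTP handler):

  query = Invidious::Search::Query.new(
    env.params.query,
    type: Invidious::Search::Query::Type::Regular,
    region: nil,
  )
  results = query.process(user)       # Array(SearchItem) or Array(ChannelVideo)
  params = query.to_http_params.to_s  # e.g. "q=...&date=..." for building result-page links
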
diff --git a/src/invidious/trending.cr b/src/invidious/trending.cr
index 910a99d8..107d148d 100644
--- a/src/invidious/trending.cr
+++ b/src/invidious/trending.cr
@@ -2,42 +2,41 @@ def fetch_trending(trending_type, region, locale)
region ||= "US"
region = region.upcase
- trending = ""
plid = nil
- if trending_type && trending_type != "Default"
- if trending_type == "Music"
- trending_type = 1
- elsif trending_type == "Gaming"
- trending_type = 2
- elsif trending_type == "Movies"
- trending_type = 3
- end
+ case trending_type.try &.downcase
+ when "music"
+ params = "4gINGgt5dG1hX2NoYXJ0cw%3D%3D"
+ when "gaming"
+ params = "4gIcGhpnYW1pbmdfY29ycHVzX21vc3RfcG9wdWxhcg%3D%3D"
+ when "movies"
+ params = "4gIKGgh0cmFpbGVycw%3D%3D"
+ else # Default
+ params = ""
+ end
- response = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
+ client_config = YoutubeAPI::ClientConfig.new(region: region)
+ initial_data = YoutubeAPI.browse("FEtrending", params: params, client_config: client_config)
- initial_data = extract_initial_data(response)
- url = initial_data["contents"]["twoColumnBrowseResultsRenderer"]["tabs"][trending_type]["tabRenderer"]["endpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
- url = "#{url}&gl=#{region}&hl=en"
+ items, _ = extract_items(initial_data)
- trending = YT_POOL.client &.get(url).body
- plid = extract_plid(url)
- else
- trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
- end
+ extracted = [] of SearchItem
- initial_data = extract_initial_data(trending)
- trending = extract_videos(initial_data)
+ deduplicate = items.size > 1
- return {trending, plid}
-end
+ items.each do |itm|
+ if itm.is_a?(Category)
+ # Ignore the smaller categories, as they generally contain a sponsored
+ # channel, which brings a lot of noise on the trending page.
+ # See: https://github.com/iv-org/invidious/issues/2989
+ next if (itm.contents.size < 24 && deduplicate)
+
+ extracted.concat extract_category(itm)
+ else
+ extracted << itm
+ end
+ end
-def extract_plid(url)
- return url.try { |i| URI.parse(i).query }
- .try { |i| HTTP::Params.parse(i)["bp"] }
- .try { |i| URI.decode_www_form(i) }
- .try { |i| Base64.decode(i) }
- .try { |i| IO::Memory.new(i) }
- .try { |i| Protodec::Any.parse(i) }
- .try &.["44:0:embedded"]?.try &.["2:1:string"]?.try &.as_s
+ # Deduplicate items before returning results
+ return extracted.select(SearchVideo).uniq!(&.id), plid
end
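
Usage stays the same as before the refactor; note that `plid` is now always nil, since the playlist-ID extraction (`extract_plid`) was removed:

  videos, plid = fetch_trending("Music", region, locale)
  # videos : Array(SearchVideo), deduplicated by id; plid : Nil
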
diff --git a/src/invidious/user/captcha.cr b/src/invidious/user/captcha.cr
new file mode 100644
index 00000000..8a0f67e5
--- /dev/null
+++ b/src/invidious/user/captcha.cr
@@ -0,0 +1,78 @@
+require "openssl/hmac"
+
+struct Invidious::User
+ module Captcha
+ extend self
+
+ private TEXTCAPTCHA_URL = URI.parse("https://textcaptcha.com")
+
+ def generate_image(key)
+ second = Random::Secure.rand(12)
+ second_angle = second * 30
+ second = second * 5
+
+ minute = Random::Secure.rand(12)
+ minute_angle = minute * 30
+ minute = minute * 5
+
+ hour = Random::Secure.rand(12)
+ hour_angle = hour * 30 + minute_angle.to_f / 12
+ if hour == 0
+ hour = 12
+ end
+
+ clock_svg = <<-END_SVG
+ <svg viewBox="0 0 100 100" width="200px" height="200px">
+ <circle cx="50" cy="50" r="45" fill="#eee" stroke="black" stroke-width="2"></circle>
+
+ <text x="69" y="20.091" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 1</text>
+ <text x="82.909" y="34" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 2</text>
+ <text x="88" y="53" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 3</text>
+ <text x="82.909" y="72" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 4</text>
+ <text x="69" y="85.909" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 5</text>
+ <text x="50" y="91" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 6</text>
+ <text x="31" y="85.909" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 7</text>
+ <text x="17.091" y="72" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 8</text>
+ <text x="12" y="53" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 9</text>
+ <text x="17.091" y="34" text-anchor="middle" fill="black" font-family="Arial" font-size="10px">10</text>
+ <text x="31" y="20.091" text-anchor="middle" fill="black" font-family="Arial" font-size="10px">11</text>
+ <text x="50" y="15" text-anchor="middle" fill="black" font-family="Arial" font-size="10px">12</text>
+
+ <circle cx="50" cy="50" r="3" fill="black"></circle>
+ <line id="second" transform="rotate(#{second_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="12" fill="black" stroke="black" stroke-width="1"></line>
+ <line id="minute" transform="rotate(#{minute_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="16" fill="black" stroke="black" stroke-width="2"></line>
+ <line id="hour" transform="rotate(#{hour_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="24" fill="black" stroke="black" stroke-width="2"></line>
+ </svg>
+ END_SVG
+
+ image = "data:image/png;base64,"
+ image += Process.run(%(rsvg-convert -w 400 -h 400 -b none -f png), shell: true,
+ input: IO::Memory.new(clock_svg), output: Process::Redirect::Pipe
+ ) do |proc|
+ Base64.strict_encode(proc.output.gets_to_end)
+ end
+
+ answer = "#{hour}:#{minute.to_s.rjust(2, '0')}:#{second.to_s.rjust(2, '0')}"
+ answer = OpenSSL::HMAC.hexdigest(:sha256, key, answer)
+
+ return {
+ question: image,
+ tokens: {generate_response(answer, {":login"}, key, use_nonce: true)},
+ }
+ end
+
+ def generate_text(key)
+ response = make_client(TEXTCAPTCHA_URL, &.get("/github.com/iv.org/invidious.json").body)
+ response = JSON.parse(response)
+
+ tokens = response["a"].as_a.map do |answer|
+ generate_response(answer.as_s, {":login"}, key, use_nonce: true)
+ end
+
+ return {
+ question: response["q"].as_s,
+ tokens: tokens,
+ }
+ end
+ end
+end
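
Both generators return the same shape, so the login page can treat them interchangeably. A rough sketch of how the image variant is consumed (HMAC_KEY is the existing server-wide secret already used elsewhere in this diff):

  captcha = Invidious::User::Captcha.generate_image(HMAC_KEY)
  captcha[:question] # "data:image/png;base64,..." clock image to embed in the form
  captcha[:tokens]   # HMAC-signed token(s) used to verify the submitted answer
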
diff --git a/src/invidious/user/converters.cr b/src/invidious/user/converters.cr
new file mode 100644
index 00000000..dcbf8c53
--- /dev/null
+++ b/src/invidious/user/converters.cr
@@ -0,0 +1,12 @@
+def convert_theme(theme)
+ case theme
+ when "true"
+ "dark"
+ when "false"
+ "light"
+ when "", nil
+ nil
+ else
+ theme
+ end
+end
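
The helper maps the boolean-style theme strings onto theme names and passes anything else through:

  convert_theme("true")      # => "dark"
  convert_theme("false")     # => "light"
  convert_theme("")          # => nil
  convert_theme("solarized") # => "solarized" (hypothetical theme name, passed through unchanged)
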
diff --git a/src/invidious/user/cookies.cr b/src/invidious/user/cookies.cr
new file mode 100644
index 00000000..654efc15
--- /dev/null
+++ b/src/invidious/user/cookies.cr
@@ -0,0 +1,39 @@
+require "http/cookie"
+
+struct Invidious::User
+ module Cookies
+ extend self
+
+ # Note: we use the ternary operator because the two variables
+ # used here are not booleans.
+ SECURE = (Kemal.config.ssl || CONFIG.https_only) ? true : false
+
+ # Session ID (SID) cookie
+ # Parameter "domain" comes from the global config
+ def sid(domain : String?, sid) : HTTP::Cookie
+ return HTTP::Cookie.new(
+ name: "SID",
+ domain: domain,
+ value: sid,
+ expires: Time.utc + 2.years,
+ secure: SECURE,
+ http_only: true,
+ samesite: HTTP::Cookie::SameSite::Lax
+ )
+ end
+
+ # Preferences (PREFS) cookie
+ # Parameter "domain" comes from the global config
+ def prefs(domain : String?, preferences : Preferences) : HTTP::Cookie
+ return HTTP::Cookie.new(
+ name: "PREFS",
+ domain: domain,
+ value: URI.encode_www_form(preferences.to_json),
+ expires: Time.utc + 2.years,
+ secure: SECURE,
+ http_only: false,
+ samesite: HTTP::Cookie::SameSite::Lax
+ )
+ end
+ end
+end
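
A sketch of how a login or preferences handler would use these builders (the exact config accessor for the cookie domain is assumed here):

  env.response.cookies << Invidious::User::Cookies.sid(CONFIG.domain, sid)
  env.response.cookies << Invidious::User::Cookies.prefs(CONFIG.domain, user.preferences)
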
diff --git a/src/invidious/user/exports.cr b/src/invidious/user/exports.cr
new file mode 100644
index 00000000..b52503c9
--- /dev/null
+++ b/src/invidious/user/exports.cr
@@ -0,0 +1,35 @@
+struct Invidious::User
+ module Export
+ extend self
+
+ def to_invidious(user : User)
+ playlists = Invidious::Database::Playlists.select_like_iv(user.email)
+
+ return JSON.build do |json|
+ json.object do
+ json.field "subscriptions", user.subscriptions
+ json.field "watch_history", user.watched
+ json.field "preferences", user.preferences
+ json.field "playlists" do
+ json.array do
+ playlists.each do |playlist|
+ json.object do
+ json.field "title", playlist.title
+ json.field "description", html_to_content(playlist.description_html)
+ json.field "privacy", playlist.privacy.to_s
+ json.field "videos" do
+ json.array do
+ Invidious::Database::PlaylistVideos.select_ids(playlist.id, playlist.index, limit: CONFIG.playlist_length_limit).each do |video_id|
+ json.string video_id
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end # module
+end
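
For reference, a caller only needs the user object; the method returns the export as a JSON string:

  export_json = Invidious::User::Export.to_invidious(user)
  # => JSON string with the keys "subscriptions", "watch_history", "preferences" and "playlists"
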
diff --git a/src/invidious/user/imports.cr b/src/invidious/user/imports.cr
new file mode 100644
index 00000000..533c18d9
--- /dev/null
+++ b/src/invidious/user/imports.cr
@@ -0,0 +1,337 @@
+require "csv"
+
+struct Invidious::User
+ module Import
+ extend self
+
+ # Parse a youtube CSV subscription file
+ def parse_subscription_export_csv(csv_content : String)
+ rows = CSV.new(csv_content.strip('\n'), headers: true)
+ subscriptions = Array(String).new
+
+ # Counter to limit the amount of imports.
+ # This is intended to prevent DoS.
+ row_counter = 0
+
+ rows.each do |row|
+ # Limit to 1200
+ row_counter += 1
+ break if row_counter > 1_200
+
+ # Channel ID is the first column in the CSV export. We can't use the
+ # header name, because it is localized depending on the language the
+ # user has set on their account.
+ channel_id = row[0].strip
+
+ next if channel_id.empty?
+ subscriptions << channel_id
+ end
+
+ return subscriptions
+ end
+
+ def parse_playlist_export_csv(user : User, raw_input : String)
+ # Split the input into head and body content
+ raw_head, raw_body = raw_input.strip('\n').split("\n\n", limit: 2, remove_empty: true)
+
+ # Create the playlist from the head content
+ csv_head = CSV.new(raw_head.strip('\n'), headers: true)
+ csv_head.next
+ title = csv_head[4]
+ description = csv_head[5]
+ visibility = csv_head[6]
+
+ if visibility.compare("Public", case_insensitive: true) == 0
+ privacy = PlaylistPrivacy::Public
+ else
+ privacy = PlaylistPrivacy::Private
+ end
+
+ playlist = create_playlist(title, privacy, user)
+ Invidious::Database::Playlists.update_description(playlist.id, description)
+
+ # Add each video to the playlist from the body content
+ csv_body = CSV.new(raw_body.strip('\n'), headers: true)
+ csv_body.each do |row|
+ video_id = row[0]
+ if playlist
+ next if !video_id
+ next if video_id == "Video Id"
+
+ begin
+ video = get_video(video_id)
+ rescue ex
+ next
+ end
+
+ playlist_video = PlaylistVideo.new({
+ title: video.title,
+ id: video.id,
+ author: video.author,
+ ucid: video.ucid,
+ length_seconds: video.length_seconds,
+ published: video.published,
+ plid: playlist.id,
+ live_now: video.live_now,
+ index: Random::Secure.rand(0_i64..Int64::MAX),
+ })
+
+ Invidious::Database::PlaylistVideos.insert(playlist_video)
+ Invidious::Database::Playlists.update_video_added(playlist.id, playlist_video.index)
+ end
+ end
+
+ return playlist
+ end
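+
+ # Illustrative layout of the expected export (values are placeholders): the
+ # input is split on the first blank line into a "head" CSV describing the
+ # playlist (title, description and visibility at column indexes 4, 5 and 6)
+ # and a "body" CSV whose first column ("Video Id") lists the video IDs.
+ #
+ #   <playlist metadata header row>
+ #   ...,...,...,...,My playlist,Some description,Public
+ #
+ #   Video Id,...
+ #   dQw4w9WgXcQ,...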
+
+ # -------------------
+ # Invidious
+ # -------------------
+
+ # Import from another Invidious account
+ def from_invidious(user : User, body : String)
+ data = JSON.parse(body)
+
+ if data["subscriptions"]?
+ user.subscriptions += data["subscriptions"].as_a.map(&.as_s)
+ user.subscriptions.uniq!
+ user.subscriptions = get_batch_channels(user.subscriptions)
+
+ Invidious::Database::Users.update_subscriptions(user)
+ end
+
+ if data["watch_history"]?
+ user.watched += data["watch_history"].as_a.map(&.as_s)
+ user.watched.reverse!.uniq!.reverse!
+ Invidious::Database::Users.update_watch_history(user)
+ end
+
+ if data["preferences"]?
+ user.preferences = Preferences.from_json(data["preferences"].to_json)
+ Invidious::Database::Users.update_preferences(user)
+ end
+
+ if playlists = data["playlists"]?.try &.as_a?
+ playlists.each do |item|
+ title = item["title"]?.try &.as_s?.try &.delete("<>")
+ description = item["description"]?.try &.as_s?.try &.delete("\r")
+ privacy = item["privacy"]?.try &.as_s?.try { |raw_pl_privacy_state| PlaylistPrivacy.parse? raw_pl_privacy_state }
+
+ next if !title
+ next if !description
+ next if !privacy
+
+ playlist = create_playlist(title, privacy, user)
+ Invidious::Database::Playlists.update_description(playlist.id, description)
+
+ item["videos"]?.try &.as_a?.try &.each_with_index do |video_id, idx|
+ if idx > CONFIG.playlist_length_limit
+ raise InfoException.new("Playlist cannot have more than #{CONFIG.playlist_length_limit} videos")
+ end
+
+ video_id = video_id.try &.as_s?
+ next if !video_id
+
+ begin
+ video = get_video(video_id, false)
+ rescue ex
+ next
+ end
+
+ playlist_video = PlaylistVideo.new({
+ title: video.title,
+ id: video.id,
+ author: video.author,
+ ucid: video.ucid,
+ length_seconds: video.length_seconds,
+ published: video.published,
+ plid: playlist.id,
+ live_now: video.live_now,
+ index: Random::Secure.rand(0_i64..Int64::MAX),
+ })
+
+ Invidious::Database::PlaylistVideos.insert(playlist_video)
+ Invidious::Database::Playlists.update_video_added(playlist.id, playlist_video.index)
+ end
+ end
+ end
+ end
+
+ # -------------------
+ # YouTube
+ # -------------------
+
+ private def opml?(mimetype : String, extension : String)
+ opml_mimetypes = [
+ "application/xml",
+ "text/xml",
+ "text/x-opml",
+ "text/x-opml+xml",
+ ]
+
+ opml_extensions = ["xml", "opml"]
+
+ return opml_mimetypes.any?(&.== mimetype) || opml_extensions.any?(&.== extension)
+ end
+
+ # Import subscribed channels from YouTube.
+ # Returns the success status as a Bool.
+ def from_youtube(user : User, body : String, filename : String, type : String) : Bool
+ extension = filename.split(".").last
+
+ if opml?(type, extension)
+ subscriptions = XML.parse(body)
+ user.subscriptions += subscriptions.xpath_nodes(%q(//outline[@type="rss"])).map do |channel|
+ channel["xmlUrl"].match!(/UC[a-zA-Z0-9_-]{22}/)[0]
+ end
+ elsif extension == "json" || type == "application/json"
+ subscriptions = JSON.parse(body)
+ user.subscriptions += subscriptions.as_a.compact_map do |entry|
+ entry["snippet"]["resourceId"]["channelId"].as_s
+ end
+ elsif extension == "csv" || type == "text/csv"
+ subscriptions = parse_subscription_export_csv(body)
+ user.subscriptions += subscriptions
+ else
+ return false
+ end
+
+ user.subscriptions.uniq!
+ user.subscriptions = get_batch_channels(user.subscriptions)
+
+ Invidious::Database::Users.update_subscriptions(user)
+ return true
+ end
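+
+ # Illustrative call (the filename and MIME type are hypothetical): the format
+ # is picked from the file extension or MIME type, so OPML, JSON and CSV
+ # subscription exports all go through this single entry point.
+ #
+ #   Invidious::User::Import.from_youtube(user, body, "subscriptions.csv", "text/csv")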
+
+ def from_youtube_pl(user : User, body : String, filename : String, type : String) : Bool
+ extension = filename.split(".").last
+
+ if extension == "csv" || type == "text/csv"
+ playlist = parse_playlist_export_csv(user, body)
+ if playlist
+ return true
+ else
+ return false
+ end
+ else
+ return false
+ end
+ end
+
+ def from_youtube_wh(user : User, body : String, filename : String, type : String) : Bool
+ extension = filename.split(".").last
+
+ if extension == "json" || type == "application/json"
+ data = JSON.parse(body)
+ watched = data.as_a.compact_map do |item|
+ next unless url = item["titleUrl"]?
+ next unless match = url.as_s.match(/\?v=(?<video_id>[a-zA-Z0-9_-]+)$/)
+ match["video_id"]
+ end
+ watched.reverse! # YouTube lists newest first
+ user.watched += watched
+ user.watched.uniq!
+ Invidious::Database::Users.update_watch_history(user)
+ return true
+ else
+ return false
+ end
+ end
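+
+ # Illustrative entry shape (a Google Takeout style "watch-history.json"
+ # array; values are placeholders). Only entries whose "titleUrl" ends in
+ # "?v=<video id>" are kept, and the list is reversed to oldest-first.
+ #
+ #   [{"title": "Watched Some video", "titleUrl": "https://www.youtube.com/watch?v=dQw4w9WgXcQ"}]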
+
+ # -------------------
+ # FreeTube
+ # -------------------
+
+ def from_freetube(user : User, body : String)
+ # Legacy import?
+ matches = body.scan(/"channelId":"(?<channel_id>[a-zA-Z0-9_-]{24})"/)
+ subs = matches.map(&.["channel_id"])
+
+ if subs.empty?
+ profiles = body.split('\n', remove_empty: true)
+ profiles.each do |profile|
+ if data = JSON.parse(profile)["subscriptions"]?
+ subs += data.as_a.map(&.["id"].as_s)
+ end
+ end
+ end
+
+ user.subscriptions += subs
+ user.subscriptions.uniq!
+ user.subscriptions = get_batch_channels(user.subscriptions)
+
+ Invidious::Database::Users.update_subscriptions(user)
+ end
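+
+ # Illustrative input shapes (values are placeholders): either a legacy export
+ # containing "channelId" entries, or one JSON profile object per line with a
+ # "subscriptions" array of objects carrying an "id".
+ #
+ #   {"subscriptions":[{"id":"UCxxxxxxxxxxxxxxxxxxxxxx"}]}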
+
+ # -------------------
+ # NewPipe
+ # -------------------
+
+ def from_newpipe_subs(user : User, body : String)
+ data = JSON.parse(body)
+
+ user.subscriptions += data["subscriptions"].as_a.compact_map do |channel|
+ if match = channel["url"].as_s.match(/\/channel\/(?<channel>UC[a-zA-Z0-9_-]{22})/)
+ next match["channel"]
+ elsif match = channel["url"].as_s.match(/\/user\/(?<user>.+)/)
+ # Resolve URL using the API
+ resolved_url = YoutubeAPI.resolve_url("https://www.youtube.com/user/#{match["user"]}")
+ ucid = resolved_url.dig?("endpoint", "browseEndpoint", "browseId")
+ next ucid.as_s if ucid
+ end
+
+ nil
+ end
+
+ user.subscriptions.uniq!
+ user.subscriptions = get_batch_channels(user.subscriptions)
+
+ Invidious::Database::Users.update_subscriptions(user)
+ end
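+
+ # Illustrative entry shape (values are placeholders): each subscription
+ # carries a channel URL, either a /channel/UC... form used directly or a
+ # legacy /user/... form that is resolved through the API.
+ #
+ #   {"subscriptions":[{"url":"https://www.youtube.com/channel/UCxxxxxxxxxxxxxxxxxxxxxx"}]}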
+
+ def from_newpipe(user : User, body : String) : Bool
+ io = IO::Memory.new(body)
+
+ Compress::Zip::File.open(io) do |file|
+ file.entries.each do |entry|
+ entry.open do |file_io|
+ # Ensure max size of 4MB
+ io_sized = IO::Sized.new(file_io, 0x400000)
+
+ next if entry.filename != "newpipe.db"
+
+ tempfile = File.tempfile(".db")
+
+ begin
+ File.write(tempfile.path, io_sized.gets_to_end)
+ rescue
+ return false
+ end
+
+ db = DB.open("sqlite3://" + tempfile.path)
+
+ user.watched += db.query_all("SELECT url FROM streams", as: String)
+ .map(&.lchop("https://www.youtube.com/watch?v="))
+
+ user.watched.uniq!
+ Invidious::Database::Users.update_watch_history(user)
+
+ user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String)
+ .map(&.lchop("https://www.youtube.com/channel/"))
+
+ user.subscriptions.uniq!
+ user.subscriptions = get_batch_channels(user.subscriptions)
+
+ Invidious::Database::Users.update_subscriptions(user)
+
+ db.close
+ tempfile.delete
+ end
+ end
+ end
+
+ # Success!
+ return true
+ end
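+
+ # Illustrative call (the filename is hypothetical): the body is expected to
+ # be a NewPipe data export, i.e. a zip archive containing a "newpipe.db"
+ # SQLite database whose "streams" and "subscriptions" tables hold full
+ # YouTube URLs.
+ #
+ #   Invidious::User::Import.from_newpipe(user, File.read("NewPipeData.zip"))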
+ end # module
+end
diff --git a/src/invidious/user/preferences.cr b/src/invidious/user/preferences.cr
new file mode 100644
index 00000000..0a8525f3
--- /dev/null
+++ b/src/invidious/user/preferences.cr
@@ -0,0 +1,275 @@
+struct Preferences
+ include JSON::Serializable
+ include YAML::Serializable
+
+ property annotations : Bool = CONFIG.default_user_preferences.annotations
+ property annotations_subscribed : Bool = CONFIG.default_user_preferences.annotations_subscribed
+ property preload : Bool = CONFIG.default_user_preferences.preload
+ property autoplay : Bool = CONFIG.default_user_preferences.autoplay
+ property automatic_instance_redirect : Bool = CONFIG.default_user_preferences.automatic_instance_redirect
+
+ @[JSON::Field(converter: Preferences::StringToArray)]
+ @[YAML::Field(converter: Preferences::StringToArray)]
+ property captions : Array(String) = CONFIG.default_user_preferences.captions
+
+ @[JSON::Field(converter: Preferences::StringToArray)]
+ @[YAML::Field(converter: Preferences::StringToArray)]
+ property comments : Array(String) = CONFIG.default_user_preferences.comments
+ property continue : Bool = CONFIG.default_user_preferences.continue
+ property continue_autoplay : Bool = CONFIG.default_user_preferences.continue_autoplay
+
+ @[JSON::Field(converter: Preferences::BoolToString)]
+ @[YAML::Field(converter: Preferences::BoolToString)]
+ property dark_mode : String = CONFIG.default_user_preferences.dark_mode
+ property latest_only : Bool = CONFIG.default_user_preferences.latest_only
+ property listen : Bool = CONFIG.default_user_preferences.listen
+ property local : Bool = CONFIG.default_user_preferences.local
+ property watch_history : Bool = CONFIG.default_user_preferences.watch_history
+ property vr_mode : Bool = CONFIG.default_user_preferences.vr_mode
+ property show_nick : Bool = CONFIG.default_user_preferences.show_nick
+
+ @[JSON::Field(converter: Preferences::ProcessString)]
+ property locale : String = CONFIG.default_user_preferences.locale
+ property region : String? = CONFIG.default_user_preferences.region
+
+ @[JSON::Field(converter: Preferences::ClampInt)]
+ property max_results : Int32 = CONFIG.default_user_preferences.max_results
+ property notifications_only : Bool = CONFIG.default_user_preferences.notifications_only
+
+ @[JSON::Field(converter: Preferences::ProcessString)]
+ property player_style : String = CONFIG.default_user_preferences.player_style
+
+ @[JSON::Field(converter: Preferences::ProcessString)]
+ property quality : String = CONFIG.default_user_preferences.quality
+ @[JSON::Field(converter: Preferences::ProcessString)]
+ property quality_dash : String = CONFIG.default_user_preferences.quality_dash
+ property default_home : String? = CONFIG.default_user_preferences.default_home
+ property feed_menu : Array(String) = CONFIG.default_user_preferences.feed_menu
+ property related_videos : Bool = CONFIG.default_user_preferences.related_videos
+
+ @[JSON::Field(converter: Preferences::ProcessString)]
+ property sort : String = CONFIG.default_user_preferences.sort
+ property speed : Float32 = CONFIG.default_user_preferences.speed
+ property thin_mode : Bool = CONFIG.default_user_preferences.thin_mode
+ property unseen_only : Bool = CONFIG.default_user_preferences.unseen_only
+ property video_loop : Bool = CONFIG.default_user_preferences.video_loop
+ property extend_desc : Bool = CONFIG.default_user_preferences.extend_desc
+ property volume : Int32 = CONFIG.default_user_preferences.volume
+ property save_player_pos : Bool = CONFIG.default_user_preferences.save_player_pos
+
+ module BoolToString
+ def self.to_json(value : String, json : JSON::Builder)
+ json.string value
+ end
+
+ def self.from_json(value : JSON::PullParser) : String
+ begin
+ result = value.read_string
+
+ if result.empty?
+ CONFIG.default_user_preferences.dark_mode
+ else
+ result
+ end
+ rescue ex
+ if value.read_bool
+ "dark"
+ else
+ "light"
+ end
+ end
+ end
+
+ def self.to_yaml(value : String, yaml : YAML::Nodes::Builder)
+ yaml.scalar value
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : String
+ unless node.is_a?(YAML::Nodes::Scalar)
+ node.raise "Expected scalar, not #{node.class}"
+ end
+
+ case node.value
+ when "true"
+ "dark"
+ when "false"
+ "light"
+ when ""
+ CONFIG.default_user_preferences.dark_mode
+ else
+ node.value
+ end
+ end
+ end
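+
+ # Illustrative mapping implemented above: a legacy boolean dark_mode of true
+ # parses to "dark", false to "light", an empty value falls back to
+ # CONFIG.default_user_preferences.dark_mode, and any other string is kept
+ # as-is.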
+
+ module ClampInt
+ def self.to_json(value : Int32, json : JSON::Builder)
+ json.number value
+ end
+
+ def self.from_json(value : JSON::PullParser) : Int32
+ value.read_int.clamp(0, MAX_ITEMS_PER_PAGE).to_i32
+ end
+
+ def self.to_yaml(value : Int32, yaml : YAML::Nodes::Builder)
+ yaml.scalar value
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Int32
+ node.value.clamp(0, MAX_ITEMS_PER_PAGE)
+ end
+ end
+
+ module FamilyConverter
+ def self.to_yaml(value : Socket::Family, yaml : YAML::Nodes::Builder)
+ case value
+ when Socket::Family::UNSPEC
+ yaml.scalar nil
+ when Socket::Family::INET
+ yaml.scalar "ipv4"
+ when Socket::Family::INET6
+ yaml.scalar "ipv6"
+ when Socket::Family::UNIX
+ raise "Invalid socket family #{value}"
+ end
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Socket::Family
+ if node.is_a?(YAML::Nodes::Scalar)
+ case node.value.downcase
+ when "ipv4"
+ Socket::Family::INET
+ when "ipv6"
+ Socket::Family::INET6
+ else
+ Socket::Family::UNSPEC
+ end
+ else
+ node.raise "Expected scalar, not #{node.class}"
+ end
+ end
+ end
+
+ module URIConverter
+ def self.to_yaml(value : URI, yaml : YAML::Nodes::Builder)
+ yaml.scalar value.normalize!
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : URI
+ if node.is_a?(YAML::Nodes::Scalar)
+ URI.parse node.value
+ else
+ node.raise "Expected scalar, not #{node.class}"
+ end
+ end
+ end
+
+ module ProcessString
+ def self.to_json(value : String, json : JSON::Builder)
+ json.string value
+ end
+
+ def self.from_json(value : JSON::PullParser) : String
+ HTML.escape(value.read_string[0, 100])
+ end
+
+ def self.to_yaml(value : String, yaml : YAML::Nodes::Builder)
+ yaml.scalar value
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : String
+ HTML.escape(node.value[0, 100])
+ end
+ end
+
+ module StringToArray
+ def self.to_json(value : Array(String), json : JSON::Builder)
+ json.array do
+ value.each do |element|
+ json.string element
+ end
+ end
+ end
+
+ def self.from_json(value : JSON::PullParser) : Array(String)
+ begin
+ result = [] of String
+ value.read_array do
+ result << HTML.escape(value.read_string[0, 100])
+ end
+ rescue ex
+ result = [HTML.escape(value.read_string[0, 100]), ""]
+ end
+
+ result
+ end
+
+ def self.to_yaml(value : Array(String), yaml : YAML::Nodes::Builder)
+ yaml.sequence do
+ value.each do |element|
+ yaml.scalar element
+ end
+ end
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Array(String)
+ begin
+ unless node.is_a?(YAML::Nodes::Sequence)
+ node.raise "Expected sequence, not #{node.class}"
+ end
+
+ result = [] of String
+ node.nodes.each do |item|
+ unless item.is_a?(YAML::Nodes::Scalar)
+ node.raise "Expected scalar, not #{item.class}"
+ end
+
+ result << HTML.escape(item.value[0, 100])
+ end
+ rescue ex
+ if node.is_a?(YAML::Nodes::Scalar)
+ result = [HTML.escape(node.value[0, 100]), ""]
+ else
+ result = ["", ""]
+ end
+ end
+
+ result
+ end
+ end
+
+ module StringToCookies
+ def self.to_yaml(value : HTTP::Cookies, yaml : YAML::Nodes::Builder)
+ (value.map { |c| "#{c.name}=#{c.value}" }).join("; ").to_yaml(yaml)
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : HTTP::Cookies
+ unless node.is_a?(YAML::Nodes::Scalar)
+ node.raise "Expected scalar, not #{node.class}"
+ end
+
+ cookies = HTTP::Cookies.new
+ node.value.split(";").each do |cookie|
+ next if cookie.strip.empty?
+ name, value = cookie.split("=", 2)
+ cookies << HTTP::Cookie.new(name.strip, value.strip)
+ end
+
+ cookies
+ end
+ end
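+
+ # Illustrative round trip (names and values are placeholders): the cookie jar
+ # is stored in YAML as a single scalar of the form
+ #
+ #   "name1=value1; name2=value2"
+ #
+ # and parsed back by splitting on ";" and then on the first "=".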
+
+ module TimeSpanConverter
+ def self.to_yaml(value : Time::Span, yaml : YAML::Nodes::Builder)
+ return yaml.scalar value.total_minutes.to_i32
+ end
+
+ def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Time::Span
+ if node.is_a?(YAML::Nodes::Scalar)
+ return decode_interval(node.value)
+ else
+ node.raise "Expected scalar, not #{node.class}"
+ end
+ end
+ end
+end
diff --git a/src/invidious/user/user.cr b/src/invidious/user/user.cr
new file mode 100644
index 00000000..a6d05fd1
--- /dev/null
+++ b/src/invidious/user/user.cr
@@ -0,0 +1,27 @@
+require "db"
+
+struct Invidious::User
+ include DB::Serializable
+
+ property updated : Time
+ property notifications : Array(String)
+ property subscriptions : Array(String)
+ property email : String
+
+ @[DB::Field(converter: Invidious::User::PreferencesConverter)]
+ property preferences : Preferences
+ property password : String?
+ property token : String
+ property watched : Array(String)
+ property feed_needs_update : Bool?
+
+ module PreferencesConverter
+ def self.from_rs(rs)
+ begin
+ Preferences.from_json(rs.read(String))
+ rescue ex
+ Preferences.from_json("{}")
+ end
+ end
+ end
+end
diff --git a/src/invidious/users.cr b/src/invidious/users.cr
index e4ebb4d1..65566d20 100644
--- a/src/invidious/users.cr
+++ b/src/invidious/users.cr
@@ -3,374 +3,11 @@ require "crypto/bcrypt/password"
# Materialized views may not be defined using bound parameters (`$1` as used elsewhere)
MATERIALIZED_VIEW_SQL = ->(email : String) { "SELECT cv.* FROM channel_videos cv WHERE EXISTS (SELECT subscriptions FROM users u WHERE cv.ucid = ANY (u.subscriptions) AND u.email = E'#{email.gsub({'\'' => "\\'", '\\' => "\\\\"})}') ORDER BY published DESC" }
-struct User
- include DB::Serializable
-
- property updated : Time
- property notifications : Array(String)
- property subscriptions : Array(String)
- property email : String
-
- @[DB::Field(converter: User::PreferencesConverter)]
- property preferences : Preferences
- property password : String?
- property token : String
- property watched : Array(String)
- property feed_needs_update : Bool?
-
- module PreferencesConverter
- def self.from_rs(rs)
- begin
- Preferences.from_json(rs.read(String))
- rescue ex
- Preferences.from_json("{}")
- end
- end
- end
-end
-
-struct Preferences
- include JSON::Serializable
- include YAML::Serializable
-
- property annotations : Bool = CONFIG.default_user_preferences.annotations
- property annotations_subscribed : Bool = CONFIG.default_user_preferences.annotations_subscribed
- property autoplay : Bool = CONFIG.default_user_preferences.autoplay
-
- @[JSON::Field(converter: Preferences::StringToArray)]
- @[YAML::Field(converter: Preferences::StringToArray)]
- property captions : Array(String) = CONFIG.default_user_preferences.captions
-
- @[JSON::Field(converter: Preferences::StringToArray)]
- @[YAML::Field(converter: Preferences::StringToArray)]
- property comments : Array(String) = CONFIG.default_user_preferences.comments
- property continue : Bool = CONFIG.default_user_preferences.continue
- property continue_autoplay : Bool = CONFIG.default_user_preferences.continue_autoplay
-
- @[JSON::Field(converter: Preferences::BoolToString)]
- @[YAML::Field(converter: Preferences::BoolToString)]
- property dark_mode : String = CONFIG.default_user_preferences.dark_mode
- property latest_only : Bool = CONFIG.default_user_preferences.latest_only
- property listen : Bool = CONFIG.default_user_preferences.listen
- property local : Bool = CONFIG.default_user_preferences.local
-
- @[JSON::Field(converter: Preferences::ProcessString)]
- property locale : String = CONFIG.default_user_preferences.locale
-
- @[JSON::Field(converter: Preferences::ClampInt)]
- property max_results : Int32 = CONFIG.default_user_preferences.max_results
- property notifications_only : Bool = CONFIG.default_user_preferences.notifications_only
-
- @[JSON::Field(converter: Preferences::ProcessString)]
- property player_style : String = CONFIG.default_user_preferences.player_style
-
- @[JSON::Field(converter: Preferences::ProcessString)]
- property quality : String = CONFIG.default_user_preferences.quality
- @[JSON::Field(converter: Preferences::ProcessString)]
- property quality_dash : String = CONFIG.default_user_preferences.quality_dash
- property default_home : String? = CONFIG.default_user_preferences.default_home
- property feed_menu : Array(String) = CONFIG.default_user_preferences.feed_menu
- property related_videos : Bool = CONFIG.default_user_preferences.related_videos
-
- @[JSON::Field(converter: Preferences::ProcessString)]
- property sort : String = CONFIG.default_user_preferences.sort
- property speed : Float32 = CONFIG.default_user_preferences.speed
- property thin_mode : Bool = CONFIG.default_user_preferences.thin_mode
- property unseen_only : Bool = CONFIG.default_user_preferences.unseen_only
- property video_loop : Bool = CONFIG.default_user_preferences.video_loop
- property extend_desc : Bool = CONFIG.default_user_preferences.extend_desc
- property volume : Int32 = CONFIG.default_user_preferences.volume
-
- module BoolToString
- def self.to_json(value : String, json : JSON::Builder)
- json.string value
- end
-
- def self.from_json(value : JSON::PullParser) : String
- begin
- result = value.read_string
-
- if result.empty?
- CONFIG.default_user_preferences.dark_mode
- else
- result
- end
- rescue ex
- if value.read_bool
- "dark"
- else
- "light"
- end
- end
- end
-
- def self.to_yaml(value : String, yaml : YAML::Nodes::Builder)
- yaml.scalar value
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : String
- unless node.is_a?(YAML::Nodes::Scalar)
- node.raise "Expected scalar, not #{node.class}"
- end
-
- case node.value
- when "true"
- "dark"
- when "false"
- "light"
- when ""
- CONFIG.default_user_preferences.dark_mode
- else
- node.value
- end
- end
- end
-
- module ClampInt
- def self.to_json(value : Int32, json : JSON::Builder)
- json.number value
- end
-
- def self.from_json(value : JSON::PullParser) : Int32
- value.read_int.clamp(0, MAX_ITEMS_PER_PAGE).to_i32
- end
-
- def self.to_yaml(value : Int32, yaml : YAML::Nodes::Builder)
- yaml.scalar value
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Int32
- node.value.clamp(0, MAX_ITEMS_PER_PAGE)
- end
- end
-
- module FamilyConverter
- def self.to_yaml(value : Socket::Family, yaml : YAML::Nodes::Builder)
- case value
- when Socket::Family::UNSPEC
- yaml.scalar nil
- when Socket::Family::INET
- yaml.scalar "ipv4"
- when Socket::Family::INET6
- yaml.scalar "ipv6"
- when Socket::Family::UNIX
- raise "Invalid socket family #{value}"
- end
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Socket::Family
- if node.is_a?(YAML::Nodes::Scalar)
- case node.value.downcase
- when "ipv4"
- Socket::Family::INET
- when "ipv6"
- Socket::Family::INET6
- else
- Socket::Family::UNSPEC
- end
- else
- node.raise "Expected scalar, not #{node.class}"
- end
- end
- end
-
- module URIConverter
- def self.to_yaml(value : URI, yaml : YAML::Nodes::Builder)
- yaml.scalar value.normalize!
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : URI
- if node.is_a?(YAML::Nodes::Scalar)
- URI.parse node.value
- else
- node.raise "Expected scalar, not #{node.class}"
- end
- end
- end
-
- module ProcessString
- def self.to_json(value : String, json : JSON::Builder)
- json.string value
- end
-
- def self.from_json(value : JSON::PullParser) : String
- HTML.escape(value.read_string[0, 100])
- end
-
- def self.to_yaml(value : String, yaml : YAML::Nodes::Builder)
- yaml.scalar value
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : String
- HTML.escape(node.value[0, 100])
- end
- end
-
- module StringToArray
- def self.to_json(value : Array(String), json : JSON::Builder)
- json.array do
- value.each do |element|
- json.string element
- end
- end
- end
-
- def self.from_json(value : JSON::PullParser) : Array(String)
- begin
- result = [] of String
- value.read_array do
- result << HTML.escape(value.read_string[0, 100])
- end
- rescue ex
- result = [HTML.escape(value.read_string[0, 100]), ""]
- end
-
- result
- end
-
- def self.to_yaml(value : Array(String), yaml : YAML::Nodes::Builder)
- yaml.sequence do
- value.each do |element|
- yaml.scalar element
- end
- end
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : Array(String)
- begin
- unless node.is_a?(YAML::Nodes::Sequence)
- node.raise "Expected sequence, not #{node.class}"
- end
-
- result = [] of String
- node.nodes.each do |item|
- unless item.is_a?(YAML::Nodes::Scalar)
- node.raise "Expected scalar, not #{item.class}"
- end
-
- result << HTML.escape(item.value[0, 100])
- end
- rescue ex
- if node.is_a?(YAML::Nodes::Scalar)
- result = [HTML.escape(node.value[0, 100]), ""]
- else
- result = ["", ""]
- end
- end
-
- result
- end
- end
-
- module StringToCookies
- def self.to_yaml(value : HTTP::Cookies, yaml : YAML::Nodes::Builder)
- (value.map { |c| "#{c.name}=#{c.value}" }).join("; ").to_yaml(yaml)
- end
-
- def self.from_yaml(ctx : YAML::ParseContext, node : YAML::Nodes::Node) : HTTP::Cookies
- unless node.is_a?(YAML::Nodes::Scalar)
- node.raise "Expected scalar, not #{node.class}"
- end
-
- cookies = HTTP::Cookies.new
- node.value.split(";").each do |cookie|
- next if cookie.strip.empty?
- name, value = cookie.split("=", 2)
- cookies << HTTP::Cookie.new(name.strip, value.strip)
- end
-
- cookies
- end
- end
-end
-
-def get_user(sid, headers, db, refresh = true)
- if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
- user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
-
- if refresh && Time.utc - user.updated > 1.minute
- user, sid = fetch_user(sid, headers, db)
- user_array = user.to_a
- user_array[4] = user_array[4].to_json # User preferences
- args = arg_array(user_array)
-
- db.exec("INSERT INTO users VALUES (#{args}) \
- ON CONFLICT (email) DO UPDATE SET updated = $1, subscriptions = $3", args: user_array)
-
- db.exec("INSERT INTO session_ids VALUES ($1,$2,$3) \
- ON CONFLICT (id) DO NOTHING", sid, user.email, Time.utc)
-
- begin
- view_name = "subscriptions_#{sha256(user.email)}"
- db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(user.email)}")
- rescue ex
- end
- end
- else
- user, sid = fetch_user(sid, headers, db)
- user_array = user.to_a
- user_array[4] = user_array[4].to_json # User preferences
- args = arg_array(user.to_a)
-
- db.exec("INSERT INTO users VALUES (#{args}) \
- ON CONFLICT (email) DO UPDATE SET updated = $1, subscriptions = $3", args: user_array)
-
- db.exec("INSERT INTO session_ids VALUES ($1,$2,$3) \
- ON CONFLICT (id) DO NOTHING", sid, user.email, Time.utc)
-
- begin
- view_name = "subscriptions_#{sha256(user.email)}"
- db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(user.email)}")
- rescue ex
- end
- end
-
- return user, sid
-end
-
-def fetch_user(sid, headers, db)
- feed = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
- feed = XML.parse_html(feed.body)
-
- channels = [] of String
- channels = feed.xpath_nodes(%q(//ul[@id="guide-channels"]/li/a)).compact_map do |channel|
- if {"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? channel["title"]
- nil
- else
- channel["href"].lstrip("/channel/")
- end
- end
-
- channels = get_batch_channels(channels, db, false, false)
-
- email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
- if email
- email = email.content.strip
- else
- email = ""
- end
-
- token = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
-
- user = User.new({
- updated: Time.utc,
- notifications: [] of String,
- subscriptions: channels,
- email: email,
- preferences: Preferences.new(CONFIG.default_user_preferences.to_tuple),
- password: nil,
- token: token,
- watched: [] of String,
- feed_needs_update: true,
- })
- return user, sid
-end
-
def create_user(sid, email, password)
password = Crypto::Bcrypt::Password.create(password, cost: 10)
token = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
- user = User.new({
+ user = Invidious::User.new({
updated: Time.utc,
notifications: [] of String,
subscriptions: [] of String,
@@ -385,135 +22,29 @@ def create_user(sid, email, password)
return user, sid
end
-def generate_captcha(key, db)
- second = Random::Secure.rand(12)
- second_angle = second * 30
- second = second * 5
-
- minute = Random::Secure.rand(12)
- minute_angle = minute * 30
- minute = minute * 5
-
- hour = Random::Secure.rand(12)
- hour_angle = hour * 30 + minute_angle.to_f / 12
- if hour == 0
- hour = 12
- end
-
- clock_svg = <<-END_SVG
- <svg viewBox="0 0 100 100" width="200px" height="200px">
- <circle cx="50" cy="50" r="45" fill="#eee" stroke="black" stroke-width="2"></circle>
-
- <text x="69" y="20.091" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 1</text>
- <text x="82.909" y="34" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 2</text>
- <text x="88" y="53" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 3</text>
- <text x="82.909" y="72" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 4</text>
- <text x="69" y="85.909" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 5</text>
- <text x="50" y="91" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 6</text>
- <text x="31" y="85.909" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 7</text>
- <text x="17.091" y="72" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 8</text>
- <text x="12" y="53" text-anchor="middle" fill="black" font-family="Arial" font-size="10px"> 9</text>
- <text x="17.091" y="34" text-anchor="middle" fill="black" font-family="Arial" font-size="10px">10</text>
- <text x="31" y="20.091" text-anchor="middle" fill="black" font-family="Arial" font-size="10px">11</text>
- <text x="50" y="15" text-anchor="middle" fill="black" font-family="Arial" font-size="10px">12</text>
-
- <circle cx="50" cy="50" r="3" fill="black"></circle>
- <line id="second" transform="rotate(#{second_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="12" fill="black" stroke="black" stroke-width="1"></line>
- <line id="minute" transform="rotate(#{minute_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="16" fill="black" stroke="black" stroke-width="2"></line>
- <line id="hour" transform="rotate(#{hour_angle}, 50, 50)" x1="50" y1="50" x2="50" y2="24" fill="black" stroke="black" stroke-width="2"></line>
- </svg>
- END_SVG
-
- image = ""
- convert = Process.run(%(rsvg-convert -w 400 -h 400 -b none -f png), shell: true,
- input: IO::Memory.new(clock_svg), output: Process::Redirect::Pipe) do |proc|
- image = proc.output.gets_to_end
- image = Base64.strict_encode(image)
- image = "data:image/png;base64,#{image}"
- end
-
- answer = "#{hour}:#{minute.to_s.rjust(2, '0')}:#{second.to_s.rjust(2, '0')}"
- answer = OpenSSL::HMAC.hexdigest(:sha256, key, answer)
-
- return {
- question: image,
- tokens: {generate_response(answer, {":login"}, key, db, use_nonce: true)},
- }
-end
-
-def generate_text_captcha(key, db)
- response = make_client(TEXTCAPTCHA_URL, &.get("/github.com/iv.org/invidious.json").body)
- response = JSON.parse(response)
-
- tokens = response["a"].as_a.map do |answer|
- generate_response(answer.as_s, {":login"}, key, db, use_nonce: true)
- end
-
- return {
- question: response["q"].as_s,
- tokens: tokens,
- }
-end
-
-def subscribe_ajax(channel_id, action, env_headers)
- headers = HTTP::Headers.new
- headers["Cookie"] = env_headers["Cookie"]
-
- html = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
-
- cookies = HTTP::Cookies.from_headers(headers)
- html.cookies.each do |cookie|
- if {"VISITOR_INFO1_LIVE", "YSC", "SIDCC"}.includes? cookie.name
- if cookies[cookie.name]?
- cookies[cookie.name] = cookie
- else
- cookies << cookie
- end
- end
- end
- headers = cookies.add_request_headers(headers)
-
- if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
- session_token = match["session_token"]
-
- headers["content-type"] = "application/x-www-form-urlencoded"
-
- post_req = {
- session_token: session_token,
- }
- post_url = "/subscription_ajax?#{action}=1&c=#{channel_id}"
-
- YT_POOL.client &.post(post_url, headers, form: post_req)
- end
-end
-
-def get_subscription_feed(db, user, max_results = 40, page = 1)
+def get_subscription_feed(user, max_results = 40, page = 1)
limit = max_results.clamp(0, MAX_ITEMS_PER_PAGE)
offset = (page - 1) * limit
- notifications = db.query_one("SELECT notifications FROM users WHERE email = $1", user.email,
- as: Array(String))
+ notifications = Invidious::Database::Users.select_notifications(user)
view_name = "subscriptions_#{sha256(user.email)}"
if user.preferences.notifications_only && !notifications.empty?
# Only show notifications
-
- args = arg_array(notifications)
-
- notifications = db.query_all("SELECT * FROM channel_videos WHERE id IN (#{args}) ORDER BY published DESC", args: notifications, as: ChannelVideo)
+ notifications = Invidious::Database::ChannelVideos.select(notifications)
videos = [] of ChannelVideo
- notifications.sort_by! { |video| video.published }.reverse!
+ notifications.sort_by!(&.published).reverse!
case user.preferences.sort
when "alphabetically"
- notifications.sort_by! { |video| video.title }
+ notifications.sort_by!(&.title)
when "alphabetically - reverse"
- notifications.sort_by! { |video| video.title }.reverse!
+ notifications.sort_by!(&.title).reverse!
when "channel name"
- notifications.sort_by! { |video| video.author }
+ notifications.sort_by!(&.author)
when "channel name - reverse"
- notifications.sort_by! { |video| video.author }.reverse!
+ notifications.sort_by!(&.author).reverse!
else nil # Ignore
end
else
@@ -534,7 +65,7 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} ORDER BY ucid, published DESC", as: ChannelVideo)
end
- videos.sort_by! { |video| video.published }.reverse!
+ videos.sort_by!(&.published).reverse!
else
if user.preferences.unseen_only
# Only show unwatched
@@ -554,20 +85,19 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
case user.preferences.sort
when "published - reverse"
- videos.sort_by! { |video| video.published }
+ videos.sort_by!(&.published)
when "alphabetically"
- videos.sort_by! { |video| video.title }
+ videos.sort_by!(&.title)
when "alphabetically - reverse"
- videos.sort_by! { |video| video.title }.reverse!
+ videos.sort_by!(&.title).reverse!
when "channel name"
- videos.sort_by! { |video| video.author }
+ videos.sort_by!(&.author)
when "channel name - reverse"
- videos.sort_by! { |video| video.author }.reverse!
+ videos.sort_by!(&.author).reverse!
else nil # Ignore
end
- notifications = PG_DB.query_one("SELECT notifications FROM users WHERE email = $1", user.email, as: Array(String))
-
+ notifications = Invidious::Database::Users.select_notifications(user)
notifications = videos.select { |v| notifications.includes? v.id }
videos = videos - notifications
end
diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr
index bf281507..ae09e736 100644
--- a/src/invidious/videos.cr
+++ b/src/invidious/videos.cr
@@ -1,255 +1,22 @@
-CAPTION_LANGUAGES = {
- "",
- "English",
- "English (auto-generated)",
- "Afrikaans",
- "Albanian",
- "Amharic",
- "Arabic",
- "Armenian",
- "Azerbaijani",
- "Bangla",
- "Basque",
- "Belarusian",
- "Bosnian",
- "Bulgarian",
- "Burmese",
- "Catalan",
- "Cebuano",
- "Chinese (Simplified)",
- "Chinese (Traditional)",
- "Corsican",
- "Croatian",
- "Czech",
- "Danish",
- "Dutch",
- "Esperanto",
- "Estonian",
- "Filipino",
- "Finnish",
- "French",
- "Galician",
- "Georgian",
- "German",
- "Greek",
- "Gujarati",
- "Haitian Creole",
- "Hausa",
- "Hawaiian",
- "Hebrew",
- "Hindi",
- "Hmong",
- "Hungarian",
- "Icelandic",
- "Igbo",
- "Indonesian",
- "Irish",
- "Italian",
- "Japanese",
- "Javanese",
- "Kannada",
- "Kazakh",
- "Khmer",
- "Korean",
- "Kurdish",
- "Kyrgyz",
- "Lao",
- "Latin",
- "Latvian",
- "Lithuanian",
- "Luxembourgish",
- "Macedonian",
- "Malagasy",
- "Malay",
- "Malayalam",
- "Maltese",
- "Maori",
- "Marathi",
- "Mongolian",
- "Nepali",
- "Norwegian Bokmål",
- "Nyanja",
- "Pashto",
- "Persian",
- "Polish",
- "Portuguese",
- "Punjabi",
- "Romanian",
- "Russian",
- "Samoan",
- "Scottish Gaelic",
- "Serbian",
- "Shona",
- "Sindhi",
- "Sinhala",
- "Slovak",
- "Slovenian",
- "Somali",
- "Southern Sotho",
- "Spanish",
- "Spanish (Latin America)",
- "Sundanese",
- "Swahili",
- "Swedish",
- "Tajik",
- "Tamil",
- "Telugu",
- "Thai",
- "Turkish",
- "Ukrainian",
- "Urdu",
- "Uzbek",
- "Vietnamese",
- "Welsh",
- "Western Frisian",
- "Xhosa",
- "Yiddish",
- "Yoruba",
- "Zulu",
-}
-
-REGIONS = {"AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR", "AS", "AT", "AU", "AW", "AX", "AZ", "BA", "BB", "BD", "BE", "BF", "BG", "BH", "BI", "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV", "BW", "BY", "BZ", "CA", "CC", "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN", "CO", "CR", "CU", "CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE", "EG", "EH", "ER", "ES", "ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL", "GM", "GN", "GP", "GQ", "GR", "GS", "GT", "GU", "GW", "GY", "HK", "HM", "HN", "HR", "HT", "HU", "ID", "IE", "IL", "IM", "IN", "IO", "IQ", "IR", "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM", "KN", "KP", "KR", "KW", "KY", "KZ", "LA", "LB", "LC", "LI", "LK", "LR", "LS", "LT", "LU", "LV", "LY", "MA", "MC", "MD", "ME", "MF", "MG", "MH", "MK", "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT", "MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP", "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PG", "PH", "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO", "RS", "RU", "RW", "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM", "SN", "SO", "SR", "SS", "ST", "SV", "SX", "SY", "SZ", "TC", "TD", "TF", "TG", "TH", "TJ", "TK", "TL", "TM", "TN", "TO", "TR", "TT", "TV", "TW", "TZ", "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI", "VN", "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW"}
-
-# See https://github.com/rg3/youtube-dl/blob/master/youtube_dl/extractor/youtube.py#L380-#L476
-VIDEO_FORMATS = {
- "5" => {"ext" => "flv", "width" => 400, "height" => 240, "acodec" => "mp3", "abr" => 64, "vcodec" => "h263"},
- "6" => {"ext" => "flv", "width" => 450, "height" => 270, "acodec" => "mp3", "abr" => 64, "vcodec" => "h263"},
- "13" => {"ext" => "3gp", "acodec" => "aac", "vcodec" => "mp4v"},
- "17" => {"ext" => "3gp", "width" => 176, "height" => 144, "acodec" => "aac", "abr" => 24, "vcodec" => "mp4v"},
- "18" => {"ext" => "mp4", "width" => 640, "height" => 360, "acodec" => "aac", "abr" => 96, "vcodec" => "h264"},
- "22" => {"ext" => "mp4", "width" => 1280, "height" => 720, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
- "34" => {"ext" => "flv", "width" => 640, "height" => 360, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
- "35" => {"ext" => "flv", "width" => 854, "height" => 480, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
-
- "36" => {"ext" => "3gp", "width" => 320, "acodec" => "aac", "vcodec" => "mp4v"},
- "37" => {"ext" => "mp4", "width" => 1920, "height" => 1080, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
- "38" => {"ext" => "mp4", "width" => 4096, "height" => 3072, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
- "43" => {"ext" => "webm", "width" => 640, "height" => 360, "acodec" => "vorbis", "abr" => 128, "vcodec" => "vp8"},
- "44" => {"ext" => "webm", "width" => 854, "height" => 480, "acodec" => "vorbis", "abr" => 128, "vcodec" => "vp8"},
- "45" => {"ext" => "webm", "width" => 1280, "height" => 720, "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
- "46" => {"ext" => "webm", "width" => 1920, "height" => 1080, "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
- "59" => {"ext" => "mp4", "width" => 854, "height" => 480, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
- "78" => {"ext" => "mp4", "width" => 854, "height" => 480, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
-
- # 3D videos
- "82" => {"ext" => "mp4", "height" => 360, "format" => "3D", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
- "83" => {"ext" => "mp4", "height" => 480, "format" => "3D", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
- "84" => {"ext" => "mp4", "height" => 720, "format" => "3D", "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
- "85" => {"ext" => "mp4", "height" => 1080, "format" => "3D", "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
- "100" => {"ext" => "webm", "height" => 360, "format" => "3D", "acodec" => "vorbis", "abr" => 128, "vcodec" => "vp8"},
- "101" => {"ext" => "webm", "height" => 480, "format" => "3D", "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
- "102" => {"ext" => "webm", "height" => 720, "format" => "3D", "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
-
- # Apple HTTP Live Streaming
- "91" => {"ext" => "mp4", "height" => 144, "format" => "HLS", "acodec" => "aac", "abr" => 48, "vcodec" => "h264"},
- "92" => {"ext" => "mp4", "height" => 240, "format" => "HLS", "acodec" => "aac", "abr" => 48, "vcodec" => "h264"},
- "93" => {"ext" => "mp4", "height" => 360, "format" => "HLS", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
- "94" => {"ext" => "mp4", "height" => 480, "format" => "HLS", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
- "95" => {"ext" => "mp4", "height" => 720, "format" => "HLS", "acodec" => "aac", "abr" => 256, "vcodec" => "h264"},
- "96" => {"ext" => "mp4", "height" => 1080, "format" => "HLS", "acodec" => "aac", "abr" => 256, "vcodec" => "h264"},
- "132" => {"ext" => "mp4", "height" => 240, "format" => "HLS", "acodec" => "aac", "abr" => 48, "vcodec" => "h264"},
- "151" => {"ext" => "mp4", "height" => 72, "format" => "HLS", "acodec" => "aac", "abr" => 24, "vcodec" => "h264"},
-
- # DASH mp4 video
- "133" => {"ext" => "mp4", "height" => 240, "format" => "DASH video", "vcodec" => "h264"},
- "134" => {"ext" => "mp4", "height" => 360, "format" => "DASH video", "vcodec" => "h264"},
- "135" => {"ext" => "mp4", "height" => 480, "format" => "DASH video", "vcodec" => "h264"},
- "136" => {"ext" => "mp4", "height" => 720, "format" => "DASH video", "vcodec" => "h264"},
- "137" => {"ext" => "mp4", "height" => 1080, "format" => "DASH video", "vcodec" => "h264"},
- "138" => {"ext" => "mp4", "format" => "DASH video", "vcodec" => "h264"}, # Height can vary (https://github.com/ytdl-org/youtube-dl/issues/4559)
- "160" => {"ext" => "mp4", "height" => 144, "format" => "DASH video", "vcodec" => "h264"},
- "212" => {"ext" => "mp4", "height" => 480, "format" => "DASH video", "vcodec" => "h264"},
- "264" => {"ext" => "mp4", "height" => 1440, "format" => "DASH video", "vcodec" => "h264"},
- "298" => {"ext" => "mp4", "height" => 720, "format" => "DASH video", "vcodec" => "h264", "fps" => 60},
- "299" => {"ext" => "mp4", "height" => 1080, "format" => "DASH video", "vcodec" => "h264", "fps" => 60},
- "266" => {"ext" => "mp4", "height" => 2160, "format" => "DASH video", "vcodec" => "h264"},
-
- # Dash mp4 audio
- "139" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "abr" => 48, "container" => "m4a_dash"},
- "140" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "abr" => 128, "container" => "m4a_dash"},
- "141" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "abr" => 256, "container" => "m4a_dash"},
- "256" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "container" => "m4a_dash"},
- "258" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "container" => "m4a_dash"},
- "325" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "dtse", "container" => "m4a_dash"},
- "328" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "ec-3", "container" => "m4a_dash"},
-
- # Dash webm
- "167" => {"ext" => "webm", "height" => 360, "width" => 640, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
- "168" => {"ext" => "webm", "height" => 480, "width" => 854, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
- "169" => {"ext" => "webm", "height" => 720, "width" => 1280, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
- "170" => {"ext" => "webm", "height" => 1080, "width" => 1920, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
- "218" => {"ext" => "webm", "height" => 480, "width" => 854, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
- "219" => {"ext" => "webm", "height" => 480, "width" => 854, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
- "278" => {"ext" => "webm", "height" => 144, "format" => "DASH video", "container" => "webm", "vcodec" => "vp9"},
- "242" => {"ext" => "webm", "height" => 240, "format" => "DASH video", "vcodec" => "vp9"},
- "243" => {"ext" => "webm", "height" => 360, "format" => "DASH video", "vcodec" => "vp9"},
- "244" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9"},
- "245" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9"},
- "246" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9"},
- "247" => {"ext" => "webm", "height" => 720, "format" => "DASH video", "vcodec" => "vp9"},
- "248" => {"ext" => "webm", "height" => 1080, "format" => "DASH video", "vcodec" => "vp9"},
- "271" => {"ext" => "webm", "height" => 1440, "format" => "DASH video", "vcodec" => "vp9"},
- # itag 272 videos are either 3840x2160 (e.g. RtoitU2A-3E) or 7680x4320 (sLprVF6d7Ug)
- "272" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9"},
- "302" => {"ext" => "webm", "height" => 720, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "303" => {"ext" => "webm", "height" => 1080, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "308" => {"ext" => "webm", "height" => 1440, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "313" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9"},
- "315" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "330" => {"ext" => "webm", "height" => 144, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "331" => {"ext" => "webm", "height" => 240, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "332" => {"ext" => "webm", "height" => 360, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "333" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "334" => {"ext" => "webm", "height" => 720, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "335" => {"ext" => "webm", "height" => 1080, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "336" => {"ext" => "webm", "height" => 1440, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
- "337" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
-
- # Dash webm audio
- "171" => {"ext" => "webm", "acodec" => "vorbis", "format" => "DASH audio", "abr" => 128},
- "172" => {"ext" => "webm", "acodec" => "vorbis", "format" => "DASH audio", "abr" => 256},
-
- # Dash webm audio with opus inside
- "249" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 50},
- "250" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 70},
- "251" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 160},
-
- # av01 video only formats sometimes served with "unknown" codecs
- "394" => {"ext" => "mp4", "height" => 144, "vcodec" => "av01.0.05M.08"},
- "395" => {"ext" => "mp4", "height" => 240, "vcodec" => "av01.0.05M.08"},
- "396" => {"ext" => "mp4", "height" => 360, "vcodec" => "av01.0.05M.08"},
- "397" => {"ext" => "mp4", "height" => 480, "vcodec" => "av01.0.05M.08"},
-}
-
-struct VideoPreferences
- include JSON::Serializable
-
- property annotations : Bool
- property autoplay : Bool
- property comments : Array(String)
- property continue : Bool
- property continue_autoplay : Bool
- property controls : Bool
- property listen : Bool
- property local : Bool
- property preferred_captions : Array(String)
- property player_style : String
- property quality : String
- property quality_dash : String
- property raw : Bool
- property region : String?
- property related_videos : Bool
- property speed : Float32 | Float64
- property video_end : Float64 | Int32
- property video_loop : Bool
- property extend_desc : Bool
- property video_start : Float64 | Int32
- property volume : Int32
+enum VideoType
+ Video
+ Livestream
+ Scheduled
end
struct Video
include DB::Serializable
+ # Version of the JSON structure.
+ # It prevents us from loading an incompatible version from cache
+ # (either newer or older, if instances with different versions run
+ # concurrently, e.g. during a version upgrade rollout).
+ #
+ # NOTE: don't forget to bump this number if any change is made to
+ # the `params` structure in videos/parser.cr!!!
+ #
+ SCHEMA_VERSION = 2
+
property id : String
@[DB::Field(converter: Video::JSONConverter)]
@@ -257,13 +24,7 @@ struct Video
property updated : Time
@[DB::Field(ignore: true)]
- property captions : Array(Caption)?
-
- @[DB::Field(ignore: true)]
- property adaptive_fmts : Array(Hash(String, JSON::Any))?
-
- @[DB::Field(ignore: true)]
- property fmt_stream : Array(Hash(String, JSON::Any))?
+ @captions = [] of Invidious::Videos::Captions::Metadata
@[DB::Field(ignore: true)]
property description : String?
@@ -274,345 +35,81 @@ struct Video
end
end
- def to_json(locale, json : JSON::Builder)
- json.object do
- json.field "type", "video"
-
- json.field "title", self.title
- json.field "videoId", self.id
-
- json.field "error", info["reason"] if info["reason"]?
-
- json.field "videoThumbnails" do
- generate_thumbnails(json, self.id)
- end
- json.field "storyboards" do
- generate_storyboards(json, self.id, self.storyboards)
- end
-
- json.field "description", self.description
- json.field "descriptionHtml", self.description_html
- json.field "published", self.published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
- json.field "keywords", self.keywords
-
- json.field "viewCount", self.views
- json.field "likeCount", self.likes
- json.field "dislikeCount", self.dislikes
-
- json.field "paid", self.paid
- json.field "premium", self.premium
- json.field "isFamilyFriendly", self.is_family_friendly
- json.field "allowedRegions", self.allowed_regions
- json.field "genre", self.genre
- json.field "genreUrl", self.genre_url
-
- json.field "author", self.author
- json.field "authorId", self.ucid
- json.field "authorUrl", "/channel/#{self.ucid}"
-
- json.field "authorThumbnails" do
- json.array do
- qualities = {32, 48, 76, 100, 176, 512}
-
- qualities.each do |quality|
- json.object do
- json.field "url", self.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
- json.field "width", quality
- json.field "height", quality
- end
- end
- end
- end
-
- json.field "subCountText", self.sub_count_text
-
- json.field "lengthSeconds", self.length_seconds
- json.field "allowRatings", self.allow_ratings
- json.field "rating", self.average_rating
- json.field "isListed", self.is_listed
- json.field "liveNow", self.live_now
- json.field "isUpcoming", self.is_upcoming
-
- if self.premiere_timestamp
- json.field "premiereTimestamp", self.premiere_timestamp.try &.to_unix
- end
-
- if hlsvp = self.hls_manifest_url
- hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", HOST_URL)
- json.field "hlsUrl", hlsvp
- end
-
- json.field "dashUrl", "#{HOST_URL}/api/manifest/dash/id/#{id}"
-
- json.field "adaptiveFormats" do
- json.array do
- self.adaptive_fmts.each do |fmt|
- json.object do
- json.field "index", "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}"
- json.field "bitrate", fmt["bitrate"].as_i.to_s
- json.field "init", "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}"
- json.field "url", fmt["url"]
- json.field "itag", fmt["itag"].as_i.to_s
- json.field "type", fmt["mimeType"]
- json.field "clen", fmt["contentLength"]
- json.field "lmt", fmt["lastModified"]
- json.field "projectionType", fmt["projectionType"]
-
- fmt_info = itag_to_metadata?(fmt["itag"])
- if fmt_info
- fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.as_i || 30
- json.field "fps", fps
- json.field "container", fmt_info["ext"]
- json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
-
- if fmt_info["height"]?
- json.field "resolution", "#{fmt_info["height"]}p"
-
- quality_label = "#{fmt_info["height"]}p"
- if fps > 30
- quality_label += "60"
- end
- json.field "qualityLabel", quality_label
-
- if fmt_info["width"]?
- json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
- end
- end
- end
- end
- end
- end
- end
+ # Methods for API v1 JSON
- json.field "formatStreams" do
- json.array do
- self.fmt_stream.each do |fmt|
- json.object do
- json.field "url", fmt["url"]
- json.field "itag", fmt["itag"].as_i.to_s
- json.field "type", fmt["mimeType"]
- json.field "quality", fmt["quality"]
-
- fmt_info = itag_to_metadata?(fmt["itag"])
- if fmt_info
- fps = fmt_info["fps"]?.try &.to_i || fmt["fps"]?.try &.as_i || 30
- json.field "fps", fps
- json.field "container", fmt_info["ext"]
- json.field "encoding", fmt_info["vcodec"]? || fmt_info["acodec"]
-
- if fmt_info["height"]?
- json.field "resolution", "#{fmt_info["height"]}p"
-
- quality_label = "#{fmt_info["height"]}p"
- if fps > 30
- quality_label += "60"
- end
- json.field "qualityLabel", quality_label
-
- if fmt_info["width"]?
- json.field "size", "#{fmt_info["width"]}x#{fmt_info["height"]}"
- end
- end
- end
- end
- end
- end
- end
-
- json.field "captions" do
- json.array do
- self.captions.each do |caption|
- json.object do
- json.field "label", caption.name.simpleText
- json.field "languageCode", caption.languageCode
- json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name.simpleText)}"
- end
- end
- end
- end
-
- json.field "recommendedVideos" do
- json.array do
- self.related_videos.each do |rv|
- if rv["id"]?
- json.object do
- json.field "videoId", rv["id"]
- json.field "title", rv["title"]
- json.field "videoThumbnails" do
- generate_thumbnails(json, rv["id"])
- end
-
- json.field "author", rv["author"]
- json.field "authorUrl", rv["author_url"]?
- json.field "authorId", rv["ucid"]?
- if rv["author_thumbnail"]?
- json.field "authorThumbnails" do
- json.array do
- qualities = {32, 48, 76, 100, 176, 512}
-
- qualities.each do |quality|
- json.object do
- json.field "url", rv["author_thumbnail"]?.try &.gsub(/s\d+-/, "s#{quality}-")
- json.field "width", quality
- json.field "height", quality
- end
- end
- end
- end
- end
-
- json.field "lengthSeconds", rv["length_seconds"]?.try &.to_i
- json.field "viewCountText", rv["short_view_count_text"]?
- json.field "viewCount", rv["view_count"]?.try &.empty? ? nil : rv["view_count"].to_i64
- end
- end
- end
- end
- end
- end
+ def to_json(locale : String?, json : JSON::Builder)
+ Invidious::JSONify::APIv1.video(self, json, locale: locale)
end
- def to_json(locale, json : JSON::Builder | Nil = nil)
- if json
- to_json(locale, json)
- else
- JSON.build do |json|
- to_json(locale, json)
- end
+ # TODO: remove the locale and follow the crystal convention
+ def to_json(locale : String?, _json : Nil)
+ JSON.build do |json|
+ Invidious::JSONify::APIv1.video(self, json, locale: locale)
end
end
- def title
- info["videoDetails"]["title"]?.try &.as_s || ""
+ def to_json(json : JSON::Builder | Nil = nil)
+ to_json(nil, json)
end
- def ucid
- info["videoDetails"]["channelId"]?.try &.as_s || ""
- end
+ # Misc methods
- def author
- info["videoDetails"]["author"]?.try &.as_s || ""
+ def video_type : VideoType
+ video_type = info["videoType"]?.try &.as_s || "video"
+ return VideoType.parse?(video_type) || VideoType::Video
end
- def length_seconds : Int32
- info["microformat"]?.try &.["playerMicroformatRenderer"]?.try &.["lengthSeconds"]?.try &.as_s.to_i ||
- info["videoDetails"]["lengthSeconds"]?.try &.as_s.to_i || 0
- end
-
- def views : Int64
- info["videoDetails"]["viewCount"]?.try &.as_s.to_i64 || 0_i64
- end
-
- def likes : Int64
- info["likes"]?.try &.as_i64 || 0_i64
- end
-
- def dislikes : Int64
- info["dislikes"]?.try &.as_i64 || 0_i64
- end
-
- def average_rating : Float64
- # (likes / (likes + dislikes) * 4 + 1)
- info["videoDetails"]["averageRating"]?.try { |t| t.as_f? || t.as_i64?.try &.to_f64 }.try &.round(4) || 0.0
+ def schema_version : Int
+ return info["version"]?.try &.as_i || 1
end
def published : Time
- info["microformat"]?.try &.["playerMicroformatRenderer"]?.try &.["publishDate"]?.try { |t| Time.parse(t.as_s, "%Y-%m-%d", Time::Location::UTC) } || Time.utc
+ return info["published"]?
+ .try { |t| Time.parse(t.as_s, "%Y-%m-%d", Time::Location::UTC) } || Time.utc
end
def published=(other : Time)
- info["microformat"].as_h["playerMicroformatRenderer"].as_h["publishDate"] = JSON::Any.new(other.to_s("%Y-%m-%d"))
- end
-
- def cookie
- info["cookie"]?.try &.as_h.map { |k, v| "#{k}=#{v}" }.join("; ") || ""
- end
-
- def allow_ratings
- r = info["videoDetails"]["allowRatings"]?.try &.as_bool
- r.nil? ? false : r
+ info["published"] = JSON::Any.new(other.to_s("%Y-%m-%d"))
end
def live_now
- info["microformat"]?.try &.["playerMicroformatRenderer"]?
- .try &.["liveBroadcastDetails"]?.try &.["isLiveNow"]?.try &.as_bool || false
+ return (self.video_type == VideoType::Livestream)
end
- def is_listed
- info["videoDetails"]["isCrawlable"]?.try &.as_bool || false
- end
-
- def is_upcoming
- info["videoDetails"]["isUpcoming"]?.try &.as_bool || false
+ def post_live_dvr
+ return info["isPostLiveDvr"].as_bool
end
def premiere_timestamp : Time?
- info["microformat"]?.try &.["playerMicroformatRenderer"]?
- .try &.["liveBroadcastDetails"]?.try &.["startTimestamp"]?.try { |t| Time.parse_rfc3339(t.as_s) }
- end
-
- def keywords
- info["videoDetails"]["keywords"]?.try &.as_a.map &.as_s || [] of String
+ info
+ .dig?("microformat", "playerMicroformatRenderer", "liveBroadcastDetails", "startTimestamp")
+ .try { |t| Time.parse_rfc3339(t.as_s) }
end
def related_videos
info["relatedVideos"]?.try &.as_a.map { |h| h.as_h.transform_values &.as_s } || [] of Hash(String, String)
end
- def allowed_regions
- info["microformat"]?.try &.["playerMicroformatRenderer"]?
- .try &.["availableCountries"]?.try &.as_a.map &.as_s || [] of String
- end
-
- def author_thumbnail : String
- info["authorThumbnail"]?.try &.as_s || ""
- end
-
- def sub_count_text : String
- info["subCountText"]?.try &.as_s || "-"
- end
+ # Methods for parsing streaming data
- def fmt_stream
- return @fmt_stream.as(Array(Hash(String, JSON::Any))) if @fmt_stream
-
- fmt_stream = info["streamingData"]?.try &.["formats"]?.try &.as_a.map &.as_h || [] of Hash(String, JSON::Any)
- fmt_stream.each do |fmt|
- if s = (fmt["cipher"]? || fmt["signatureCipher"]?).try { |h| HTTP::Params.parse(h.as_s) }
- s.each do |k, v|
- fmt[k] = JSON::Any.new(v)
- end
- fmt["url"] = JSON::Any.new("#{fmt["url"]}#{DECRYPT_FUNCTION.decrypt_signature(fmt)}")
- end
-
- fmt["url"] = JSON::Any.new("#{fmt["url"]}&host=#{URI.parse(fmt["url"].as_s).host}")
- fmt["url"] = JSON::Any.new("#{fmt["url"]}&region=#{self.info["region"]}") if self.info["region"]?
+ def fmt_stream : Array(Hash(String, JSON::Any))
+ if formats = info.dig?("streamingData", "formats")
+ return formats
+ .as_a.map(&.as_h)
+ .sort_by! { |f| f["width"]?.try &.as_i || 0 }
+ else
+ return [] of Hash(String, JSON::Any)
end
- fmt_stream.sort_by! { |f| f["width"]?.try &.as_i || 0 }
- @fmt_stream = fmt_stream
- return @fmt_stream.as(Array(Hash(String, JSON::Any)))
end
- def adaptive_fmts
- return @adaptive_fmts.as(Array(Hash(String, JSON::Any))) if @adaptive_fmts
- fmt_stream = info["streamingData"]?.try &.["adaptiveFormats"]?.try &.as_a.map &.as_h || [] of Hash(String, JSON::Any)
- fmt_stream.each do |fmt|
- if s = (fmt["cipher"]? || fmt["signatureCipher"]?).try { |h| HTTP::Params.parse(h.as_s) }
- s.each do |k, v|
- fmt[k] = JSON::Any.new(v)
- end
- fmt["url"] = JSON::Any.new("#{fmt["url"]}#{DECRYPT_FUNCTION.decrypt_signature(fmt)}")
- end
-
- fmt["url"] = JSON::Any.new("#{fmt["url"]}&host=#{URI.parse(fmt["url"].as_s).host}")
- fmt["url"] = JSON::Any.new("#{fmt["url"]}&region=#{self.info["region"]}") if self.info["region"]?
+ def adaptive_fmts : Array(Hash(String, JSON::Any))
+ if formats = info.dig?("streamingData", "adaptiveFormats")
+ return formats
+ .as_a.map(&.as_h)
+ .sort_by! { |f| f["width"]?.try &.as_i || 0 }
+ else
+ return [] of Hash(String, JSON::Any)
end
- # See https://github.com/TeamNewPipe/NewPipe/issues/2415
- # Some streams are segmented by URL `sq/` rather than index, for now we just filter them out
- fmt_stream.reject! { |f| !f["indexRange"]? }
- fmt_stream.sort_by! { |f| f["width"]?.try &.as_i || 0 }
- @adaptive_fmts = fmt_stream
- return @adaptive_fmts.as(Array(Hash(String, JSON::Any)))
end
def video_streams
@@ -623,391 +120,222 @@ struct Video
adaptive_fmts.select &.["mimeType"]?.try &.as_s.starts_with?("audio")
end
- def storyboards
- storyboards = info["storyboards"]?
- .try &.as_h
- .try &.["playerStoryboardSpecRenderer"]?
- .try &.["spec"]?
- .try &.as_s.split("|")
-
- if !storyboards
- if storyboard = info["storyboards"]?
- .try &.as_h
- .try &.["playerLiveStoryboardSpecRenderer"]?
- .try &.["spec"]?
- .try &.as_s
- return [{
- url: storyboard.split("#")[0],
- width: 106,
- height: 60,
- count: -1,
- interval: 5000,
- storyboard_width: 3,
- storyboard_height: 3,
- storyboard_count: -1,
- }]
- end
- end
-
- items = [] of NamedTuple(
- url: String,
- width: Int32,
- height: Int32,
- count: Int32,
- interval: Int32,
- storyboard_width: Int32,
- storyboard_height: Int32,
- storyboard_count: Int32)
-
- return items if !storyboards
-
- url = URI.parse(storyboards.shift)
- params = HTTP::Params.parse(url.query || "")
-
- storyboards.each_with_index do |storyboard, i|
- width, height, count, storyboard_width, storyboard_height, interval, _, sigh = storyboard.split("#")
- params["sigh"] = sigh
- url.query = params.to_s
-
- width = width.to_i
- height = height.to_i
- count = count.to_i
- interval = interval.to_i
- storyboard_width = storyboard_width.to_i
- storyboard_height = storyboard_height.to_i
- storyboard_count = (count / (storyboard_width * storyboard_height)).ceil.to_i
-
- items << {
- url: url.to_s.sub("$L", i).sub("$N", "M$M"),
- width: width,
- height: height,
- count: count,
- interval: interval,
- storyboard_width: storyboard_width,
- storyboard_height: storyboard_height,
- storyboard_count: storyboard_count,
- }
- end
+ # Misc. methods
- items
+ def storyboards
+ container = info.dig?("storyboards") || JSON::Any.new("{}")
+ return IV::Videos::Storyboard.from_yt_json(container, self.length_seconds)
end
def paid
- reason = info["playabilityStatus"]?.try &.["reason"]?
- paid = reason == "This video requires payment to watch." ? true : false
- paid
+ return (self.reason || "").includes? "requires payment"
end
def premium
keywords.includes? "YouTube Red"
end
- def captions : Array(Caption)
- return @captions.as(Array(Caption)) if @captions
- captions = info["captions"]?.try &.["playerCaptionsTracklistRenderer"]?.try &.["captionTracks"]?.try &.as_a.map do |caption|
- caption = Caption.from_json(caption.to_json)
- caption.name.simpleText = caption.name.simpleText.split(" - ")[0]
- caption
+ def captions : Array(Invidious::Videos::Captions::Metadata)
+ if @captions.empty? && @info.has_key?("captions")
+ @captions = Invidious::Videos::Captions::Metadata.from_yt_json(info["captions"])
end
- captions ||= [] of Caption
- @captions = captions
- return @captions.as(Array(Caption))
- end
-
- def description
- description = info["microformat"]?.try &.["playerMicroformatRenderer"]?
- .try &.["description"]?.try &.["simpleText"]?.try &.as_s || ""
- end
-
- # TODO
- def description=(value : String)
- @description = value
- end
-
- def description_html
- info["descriptionHtml"]?.try &.as_s || "<p></p>"
- end
-
- def description_html=(value : String)
- info["descriptionHtml"] = JSON::Any.new(value)
- end
- def short_description
- info["shortDescription"]?.try &.as_s? || ""
+ return @captions
end
def hls_manifest_url : String?
- info["streamingData"]?.try &.["hlsManifestUrl"]?.try &.as_s
+ info.dig?("streamingData", "hlsManifestUrl").try &.as_s
end
- def dash_manifest_url
- info["streamingData"]?.try &.["dashManifestUrl"]?.try &.as_s
- end
+ def dash_manifest_url : String?
+ raw_dash_url = info.dig?("streamingData", "dashManifestUrl").try &.as_s
+ return nil if raw_dash_url.nil?
- def genre : String
- info["genre"]?.try &.as_s || ""
- end
+ # Use manifest v5 parameter to reduce file size
+ # See https://github.com/iv-org/invidious/issues/4186
+ dash_url = URI.parse(raw_dash_url)
+ dash_query = dash_url.query || ""
- def genre_url : String?
- info["genreUcid"]? ? "/channel/#{info["genreUcid"]}" : nil
- end
+ if dash_query.empty?
+ dash_url.path = "#{dash_url.path}/mpd_version/5"
+ else
+ dash_url.query = "#{dash_query}&mpd_version=5"
+ end
- def license : String?
- info["license"]?.try &.as_s
+ return dash_url.to_s
end
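+
+  # For illustration, with hypothetical URLs: `dash_manifest_url` would turn
+  # "https://example.invalid/api/manifest/dash/id/xyz" into
+  # "https://example.invalid/api/manifest/dash/id/xyz/mpd_version/5", and
+  # "https://example.invalid/manifest?foo=1" into
+  # "https://example.invalid/manifest?foo=1&mpd_version=5".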
- def is_family_friendly : Bool
- info["microformat"]?.try &.["playerMicroformatRenderer"]["isFamilySafe"]?.try &.as_bool || false
+ def genre_url : String?
+ info["genreUcid"].try &.as_s? ? "/channel/#{info["genreUcid"]}" : nil
end
- def wilson_score : Float64
- ci_lower_bound(likes, likes + dislikes).round(4)
+ def vr? : Bool?
+ return {"EQUIRECTANGULAR", "MESH"}.includes? self.projection_type
end
- def engagement : Float64
- (((likes + dislikes) / views) * 100).round(4)
+ def projection_type : String?
+ return info.dig?("streamingData", "adaptiveFormats", 0, "projectionType").try &.as_s
end
def reason : String?
info["reason"]?.try &.as_s
end
- def session_token : String?
- info["sessionToken"]?.try &.as_s?
+ def music : Array(VideoMusic)
+ info["music"].as_a.map { |music_json|
+ VideoMusic.new(
+ music_json["song"].as_s,
+ music_json["album"].as_s,
+ music_json["artist"].as_s,
+ music_json["license"].as_s
+ )
+ }
end
-end
-struct CaptionName
- include JSON::Serializable
+ # Macros defining getters/setters for various types of data
- property simpleText : String
-end
+ private macro getset_string(name)
+ # Return {{name.stringify}} from `info`
+ def {{name.id.underscore}} : String
+ return info[{{name.stringify}}]?.try &.as_s || ""
+ end
-struct Caption
- include JSON::Serializable
+ # Update {{name.stringify}} into `info`
+ def {{name.id.underscore}}=(value : String)
+ info[{{name.stringify}}] = JSON::Any.new(value)
+ end
- property name : CaptionName
- property baseUrl : String
- property languageCode : String
-end
+ {% if flag?(:debug_macros) %} {{debug}} {% end %}
+ end
-class VideoRedirect < Exception
- property video_id : String
+ private macro getset_string_array(name)
+ # Return {{name.stringify}} from `info`
+ def {{name.id.underscore}} : Array(String)
+ return info[{{name.stringify}}]?.try &.as_a.map &.as_s || [] of String
+ end
+
+ # Update {{name.stringify}} into `info`
+ def {{name.id.underscore}}=(value : Array(String))
+ info[{{name.stringify}}] = JSON::Any.new(value)
+ end
- def initialize(@video_id)
+ {% if flag?(:debug_macros) %} {{debug}} {% end %}
end
-end
-def parse_related(r : JSON::Any) : JSON::Any?
- # TODO: r["endScreenPlaylistRenderer"], etc.
- return if !r["endScreenVideoRenderer"]?
- r = r["endScreenVideoRenderer"].as_h
-
- return if !r["lengthInSeconds"]?
-
- rv = {} of String => JSON::Any
- rv["author"] = r["shortBylineText"]["runs"][0]?.try &.["text"] || JSON::Any.new("")
- rv["ucid"] = r["shortBylineText"]["runs"][0]?.try &.["navigationEndpoint"]["browseEndpoint"]["browseId"] || JSON::Any.new("")
- rv["author_url"] = JSON::Any.new("/channel/#{rv["ucid"]}")
- rv["length_seconds"] = JSON::Any.new(r["lengthInSeconds"].as_i.to_s)
- rv["title"] = r["title"]["simpleText"]
- rv["short_view_count_text"] = JSON::Any.new(r["shortViewCountText"]?.try &.["simpleText"]?.try &.as_s || "")
- rv["view_count"] = JSON::Any.new(r["title"]["accessibility"]?.try &.["accessibilityData"]["label"].as_s.match(/(?<views>[1-9](\d+,?)*) views/).try &.["views"].gsub(/\D/, "") || "")
- rv["id"] = r["videoId"]
- JSON::Any.new(rv)
-end
+ {% for op, type in {i32: Int32, i64: Int64} %}
+ private macro getset_{{op}}(name)
+ def \{{name.id.underscore}} : {{type}}
+ return info[\{{name.stringify}}]?.try &.as_i64.to_{{op}} || 0_{{op}}
+ end
-def extract_polymer_config(body)
- params = {} of String => JSON::Any
- player_response = body.match(/(window\["ytInitialPlayerResponse"\]|var\sytInitialPlayerResponse)\s*=\s*(?<info>{.*?});\s*var\s*meta/m)
- .try { |r| JSON.parse(r["info"]).as_h }
-
- if body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
- body.includes?("https://www.google.com/sorry/index")
- params["reason"] = JSON::Any.new("Could not extract video info. Instance is likely blocked.")
- elsif !player_response
- params["reason"] = JSON::Any.new("Video unavailable.")
- elsif player_response["playabilityStatus"]?.try &.["status"]?.try &.as_s != "OK"
- reason = player_response["playabilityStatus"]["errorScreen"]?.try &.["playerErrorMessageRenderer"]?.try &.["subreason"]?.try { |s| s["simpleText"]?.try &.as_s || s["runs"].as_a.map { |r| r["text"] }.join("") } ||
- player_response["playabilityStatus"]["reason"].as_s
- params["reason"] = JSON::Any.new(reason)
- end
+ def \{{name.id.underscore}}=(value : Int)
+ info[\{{name.stringify}}] = JSON::Any.new(value.to_i64)
+ end
- session_token_json_encoded = body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""
- params["sessionToken"] = JSON.parse(%({"key": "#{session_token_json_encoded}"}))["key"]
- params["shortDescription"] = JSON::Any.new(body.match(/"og:description" content="(?<description>[^"]+)"/).try &.["description"]?)
+ \{% if flag?(:debug_macros) %} \{{debug}} \{% end %}
+ end
+ {% end %}
- return params if !player_response
+ private macro getset_bool(name)
+ # Return {{name.stringify}} from `info`
+ def {{name.id.underscore}} : Bool
+ return info[{{name.stringify}}]?.try &.as_bool || false
+ end
+
+ # Update {{name.stringify}} into `info`
+ def {{name.id.underscore}}=(value : Bool)
+ info[{{name.stringify}}] = JSON::Any.new(value)
+ end
- {"captions", "microformat", "playabilityStatus", "storyboards", "videoDetails"}.each do |f|
- params[f] = player_response[f] if player_response[f]?
+ {% if flag?(:debug_macros) %} {{debug}} {% end %}
end
- yt_initial_data = extract_initial_data(body)
-
- params["relatedVideos"] = yt_initial_data.try &.["playerOverlays"]?.try &.["playerOverlayRenderer"]?
- .try &.["endScreen"]?.try &.["watchNextEndScreenRenderer"]?.try &.["results"]?.try &.as_a.compact_map { |r|
- parse_related r
- }.try { |a| JSON::Any.new(a) } || yt_initial_data.try &.["webWatchNextResponseExtensionData"]?.try &.["relatedVideoArgs"]?
- .try &.as_s.split(",").map { |r|
- r = HTTP::Params.parse(r).to_h
- JSON::Any.new(Hash.zip(r.keys, r.values.map { |v| JSON::Any.new(v) }))
- }.try { |a| JSON::Any.new(a) } || JSON::Any.new([] of JSON::Any)
-
- primary_results = yt_initial_data.try &.["contents"]?.try &.["twoColumnWatchNextResults"]?.try &.["results"]?
- .try &.["results"]?.try &.["contents"]?
- sentiment_bar = primary_results.try &.as_a.select { |object| object["videoPrimaryInfoRenderer"]? }[0]?
- .try &.["videoPrimaryInfoRenderer"]?
- .try &.["sentimentBar"]?
- .try &.["sentimentBarRenderer"]?
- .try &.["tooltip"]?
- .try &.as_s
-
- likes, dislikes = sentiment_bar.try &.split(" / ", 2).map &.gsub(/\D/, "").to_i64 || {0_i64, 0_i64}
- params["likes"] = JSON::Any.new(likes)
- params["dislikes"] = JSON::Any.new(dislikes)
-
- params["descriptionHtml"] = JSON::Any.new(primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
- .try &.["videoSecondaryInfoRenderer"]?.try &.["description"]?.try &.["runs"]?
- .try &.as_a.try { |t| content_to_comment_html(t).gsub("\n", "<br/>") } || "<p></p>")
-
- metadata = primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
- .try &.["videoSecondaryInfoRenderer"]?
- .try &.["metadataRowContainer"]?
- .try &.["metadataRowContainerRenderer"]?
- .try &.["rows"]?
- .try &.as_a
-
- params["genre"] = params["microformat"]?.try &.["playerMicroformatRenderer"]?.try &.["category"]? || JSON::Any.new("")
- params["genreUrl"] = JSON::Any.new(nil)
-
- metadata.try &.each do |row|
- title = row["metadataRowRenderer"]?.try &.["title"]?.try &.["simpleText"]?.try &.as_s
- contents = row["metadataRowRenderer"]?
- .try &.["contents"]?
- .try &.as_a[0]?
-
- if title.try &.== "Category"
- contents = contents.try &.["runs"]?
- .try &.as_a[0]?
-
- params["genre"] = JSON::Any.new(contents.try &.["text"]?.try &.as_s || "")
- params["genreUcid"] = JSON::Any.new(contents.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]?
- .try &.["browseId"]?.try &.as_s || "")
- elsif title.try &.== "License"
- contents = contents.try &.["runs"]?
- .try &.as_a[0]?
-
- params["license"] = JSON::Any.new(contents.try &.["text"]?.try &.as_s || "")
- elsif title.try &.== "Licensed to YouTube by"
- params["license"] = JSON::Any.new(contents.try &.["simpleText"]?.try &.as_s || "")
+ # Macro to generate ? and = accessor methods for attributes in `info`
+ private macro predicate_bool(method_name, name)
+ # Return {{name.stringify}} from `info`
+ def {{method_name.id.underscore}}? : Bool
+ return info[{{name.stringify}}]?.try &.as_bool || false
end
- end
- author_info = primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
- .try &.["videoSecondaryInfoRenderer"]?.try &.["owner"]?.try &.["videoOwnerRenderer"]?
+ # Update {{name.stringify}} into `info`
+ def {{method_name.id.underscore}}=(value : Bool)
+ info[{{name.stringify}}] = JSON::Any.new(value)
+ end
- params["authorThumbnail"] = JSON::Any.new(author_info.try &.["thumbnail"]?
- .try &.["thumbnails"]?.try &.as_a[0]?.try &.["url"]?
- .try &.as_s || "")
+ {% if flag?(:debug_macros) %} {{debug}} {% end %}
+ end
- params["subCountText"] = JSON::Any.new(author_info.try &.["subscriberCountText"]?
- .try { |t| t["simpleText"]? || t["runs"]?.try &.[0]?.try &.["text"]? }.try &.as_s.split(" ", 2)[0] || "-")
+ # Method definitions, using the macros above
- initial_data = body.match(/ytplayer\.config\s*=\s*(?<info>.*?);ytplayer\.web_player_context_config/)
- .try { |r| JSON.parse(r["info"]) }.try &.["args"]["player_response"]?
- .try &.as_s?.try &.try { |r| JSON.parse(r).as_h }
+ getset_string author
+ getset_string authorThumbnail
+ getset_string description
+ getset_string descriptionHtml
+ getset_string genre
+ getset_string genreUcid
+ getset_string license
+ getset_string shortDescription
+ getset_string subCountText
+ getset_string title
+ getset_string ucid
- if initial_data
- {"playabilityStatus", "streamingData"}.each do |f|
- params[f] = initial_data[f] if initial_data[f]?
- end
- else
- {"playabilityStatus", "streamingData"}.each do |f|
- params[f] = player_response[f] if player_response[f]?
- end
- end
+ getset_string_array allowedRegions
+ getset_string_array keywords
+
+ getset_i32 lengthSeconds
+ getset_i64 likes
+ getset_i64 views
- params
+  # TODO: Make predicate_bool the default so as to adhere to Crystal conventions
+ getset_bool allowRatings
+ getset_bool authorVerified
+ getset_bool isFamilyFriendly
+ getset_bool isListed
+ predicate_bool upcoming, isUpcoming
end
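+
+# For reference, a rough sketch of what the getter/setter macros above expand to:
+# a call like `getset_string title` generates approximately
+#
+#   def title : String
+#     return info["title"]?.try &.as_s || ""
+#   end
+#
+#   def title=(value : String)
+#     info["title"] = JSON::Any.new(value)
+#   end
+#
+# and `getset_i64 likes` generates the equivalent Int64-typed pair reading from
+# and writing to `info["likes"]`.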
-def get_video(id, db, refresh = true, region = nil, force_refresh = false)
- if (video = db.query_one?("SELECT * FROM videos WHERE id = $1", id, as: Video)) && !region
+def get_video(id, refresh = true, region = nil, force_refresh = false)
+ if (video = Invidious::Database::Videos.select(id)) && !region
# If record was last updated over 10 minutes ago, or video has since premiered,
# refresh (expire param in response lasts for 6 hours)
if (refresh &&
(Time.utc - video.updated > 10.minutes) ||
(video.premiere_timestamp.try &.< Time.utc)) ||
- force_refresh
+ force_refresh ||
+ video.schema_version != Video::SCHEMA_VERSION # cache control
begin
video = fetch_video(id, region)
- db.exec("UPDATE videos SET (id, info, updated) = ($1, $2, $3) WHERE id = $1", video.id, video.info.to_json, video.updated)
+ Invidious::Database::Videos.update(video)
rescue ex
- db.exec("DELETE FROM videos * WHERE id = $1", id)
+ Invidious::Database::Videos.delete(id)
raise ex
end
end
else
video = fetch_video(id, region)
- if !region
- db.exec("INSERT INTO videos VALUES ($1, $2, $3) ON CONFLICT (id) DO NOTHING", video.id, video.info.to_json, video.updated)
- end
+ Invidious::Database::Videos.insert(video) if !region
end
return video
+rescue DB::Error
+ # Avoid common `DB::PoolRetryAttemptsExceeded` error and friends
+ # Note: All DB errors inherit from `DB::Error`
+ return fetch_video(id, region)
end
def fetch_video(id, region)
- response = YT_POOL.client(region, &.get("/watch?v=#{id}&gl=US&hl=en&has_verified=1&bpctr=9999999999"))
-
- if md = response.headers["location"]?.try &.match(/v=(?<id>[a-zA-Z0-9_-]{11})/)
- raise VideoRedirect.new(video_id: md["id"])
- end
-
- info = extract_polymer_config(response.body)
- info["cookie"] = JSON::Any.new(response.cookies.to_h.transform_values { |v| JSON::Any.new(v.value) })
- allowed_regions = info["microformat"]?.try &.["playerMicroformatRenderer"]["availableCountries"]?.try &.as_a.map &.as_s || [] of String
-
- # Check for region-blocks
- if info["reason"]?.try &.as_s.includes?("your country")
- bypass_regions = PROXY_LIST.keys & allowed_regions
- if !bypass_regions.empty?
- region = bypass_regions[rand(bypass_regions.size)]
- response = YT_POOL.client(region, &.get("/watch?v=#{id}&gl=US&hl=en&has_verified=1&bpctr=9999999999"))
-
- region_info = extract_polymer_config(response.body)
- region_info["region"] = JSON::Any.new(region) if region
- region_info["cookie"] = JSON::Any.new(response.cookies.to_h.transform_values { |v| JSON::Any.new(v.value) })
- info = region_info if !region_info["reason"]?
+ info = extract_video_info(video_id: id)
+
+ if reason = info["reason"]?
+ if reason == "Video unavailable"
+ raise NotFoundException.new(reason.as_s || "")
+ elsif !reason.as_s.starts_with? "Premieres"
+      # Don't error when it's a premiere:
+      # we already parsed most of the data and can display the premiere date
+ raise InfoException.new(reason.as_s || "")
end
end
- # Try to pull streams from embed URL
- if info["reason"]?
- embed_page = YT_POOL.client &.get("/embed/#{id}").body
- sts = embed_page.match(/"sts"\s*:\s*(?<sts>\d+)/).try &.["sts"]? || ""
- embed_info = HTTP::Params.parse(YT_POOL.client &.get("/get_video_info?html5=1&video_id=#{id}&eurl=https://youtube.googleapis.com/v/#{id}&gl=US&hl=en&sts=#{sts}").body)
-
- if embed_info["player_response"]?
- player_response = JSON.parse(embed_info["player_response"])
- {"captions", "microformat", "playabilityStatus", "streamingData", "videoDetails", "storyboards"}.each do |f|
- info[f] = player_response[f] if player_response[f]?
- end
- end
-
- initial_data = JSON.parse(embed_info["watch_next_response"]) if embed_info["watch_next_response"]?
-
- info["relatedVideos"] = initial_data.try &.["playerOverlays"]?.try &.["playerOverlayRenderer"]?
- .try &.["endScreen"]?.try &.["watchNextEndScreenRenderer"]?.try &.["results"]?.try &.as_a.compact_map { |r|
- parse_related r
- }.try { |a| JSON::Any.new(a) } || embed_info["rvs"]?.try &.split(",").map { |r|
- r = HTTP::Params.parse(r).to_h
- JSON::Any.new(Hash.zip(r.keys, r.values.map { |v| JSON::Any.new(v) }))
- }.try { |a| JSON::Any.new(a) } || JSON::Any.new([] of JSON::Any)
- end
-
- raise InfoException.new(info["reason"]?.try &.as_s || "") if !info["videoDetails"]?
-
video = Video.new({
id: id,
info: info,
@@ -1017,11 +345,7 @@ def fetch_video(id, region)
return video
end
-def itag_to_metadata?(itag : JSON::Any)
- return VIDEO_FORMATS[itag.to_s]?
-end
-
-def process_continuation(db, query, plid, id)
+def process_continuation(query, plid, id)
continuation = nil
if plid
if index = query["index"]?.try &.to_i?
@@ -1035,125 +359,6 @@ def process_continuation(db, query, plid, id)
continuation
end
-def process_video_params(query, preferences)
- annotations = query["iv_load_policy"]?.try &.to_i?
- autoplay = query["autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- comments = query["comments"]?.try &.split(",").map { |a| a.downcase }
- continue = query["continue"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- continue_autoplay = query["continue_autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- listen = query["listen"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- local = query["local"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- player_style = query["player_style"]?
- preferred_captions = query["subtitles"]?.try &.split(",").map { |a| a.downcase }
- quality = query["quality"]?
- quality_dash = query["quality_dash"]?
- region = query["region"]?
- related_videos = query["related_videos"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- speed = query["speed"]?.try &.rchop("x").to_f?
- video_loop = query["loop"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- extend_desc = query["extend_desc"]?.try { |q| (q == "true" || q == "1").to_unsafe }
- volume = query["volume"]?.try &.to_i?
-
- if preferences
- # region ||= preferences.region
- annotations ||= preferences.annotations.to_unsafe
- autoplay ||= preferences.autoplay.to_unsafe
- comments ||= preferences.comments
- continue ||= preferences.continue.to_unsafe
- continue_autoplay ||= preferences.continue_autoplay.to_unsafe
- listen ||= preferences.listen.to_unsafe
- local ||= preferences.local.to_unsafe
- player_style ||= preferences.player_style
- preferred_captions ||= preferences.captions
- quality ||= preferences.quality
- quality_dash ||= preferences.quality_dash
- related_videos ||= preferences.related_videos.to_unsafe
- speed ||= preferences.speed
- video_loop ||= preferences.video_loop.to_unsafe
- extend_desc ||= preferences.extend_desc.to_unsafe
- volume ||= preferences.volume
- end
-
- annotations ||= CONFIG.default_user_preferences.annotations.to_unsafe
- autoplay ||= CONFIG.default_user_preferences.autoplay.to_unsafe
- comments ||= CONFIG.default_user_preferences.comments
- continue ||= CONFIG.default_user_preferences.continue.to_unsafe
- continue_autoplay ||= CONFIG.default_user_preferences.continue_autoplay.to_unsafe
- listen ||= CONFIG.default_user_preferences.listen.to_unsafe
- local ||= CONFIG.default_user_preferences.local.to_unsafe
- player_style ||= CONFIG.default_user_preferences.player_style
- preferred_captions ||= CONFIG.default_user_preferences.captions
- quality ||= CONFIG.default_user_preferences.quality
- quality_dash ||= CONFIG.default_user_preferences.quality_dash
- related_videos ||= CONFIG.default_user_preferences.related_videos.to_unsafe
- speed ||= CONFIG.default_user_preferences.speed
- video_loop ||= CONFIG.default_user_preferences.video_loop.to_unsafe
- extend_desc ||= CONFIG.default_user_preferences.extend_desc.to_unsafe
- volume ||= CONFIG.default_user_preferences.volume
-
- annotations = annotations == 1
- autoplay = autoplay == 1
- continue = continue == 1
- continue_autoplay = continue_autoplay == 1
- listen = listen == 1
- local = local == 1
- related_videos = related_videos == 1
- video_loop = video_loop == 1
- extend_desc = extend_desc == 1
-
- if CONFIG.disabled?("dash") && quality == "dash"
- quality = "high"
- end
-
- if CONFIG.disabled?("local") && local
- local = false
- end
-
- if start = query["t"]? || query["time_continue"]? || query["start"]?
- video_start = decode_time(start)
- end
- video_start ||= 0
-
- if query["end"]?
- video_end = decode_time(query["end"])
- end
- video_end ||= -1
-
- raw = query["raw"]?.try &.to_i?
- raw ||= 0
- raw = raw == 1
-
- controls = query["controls"]?.try &.to_i?
- controls ||= 1
- controls = controls >= 1
-
- params = VideoPreferences.new({
- annotations: annotations,
- autoplay: autoplay,
- comments: comments,
- continue: continue,
- continue_autoplay: continue_autoplay,
- controls: controls,
- listen: listen,
- local: local,
- player_style: player_style,
- preferred_captions: preferred_captions,
- quality: quality,
- quality_dash: quality_dash,
- raw: raw,
- region: region,
- related_videos: related_videos,
- speed: speed,
- video_end: video_end,
- video_loop: video_loop,
- extend_desc: extend_desc,
- video_start: video_start,
- volume: volume,
- })
-
- return params
-end
-
def build_thumbnails(id)
return {
{host: HOST_URL, height: 720, width: 1280, name: "maxres", url: "maxres"},
@@ -1167,34 +372,3 @@ def build_thumbnails(id)
{host: HOST_URL, height: 90, width: 120, name: "end", url: "3"},
}
end
-
-def generate_thumbnails(json, id)
- json.array do
- build_thumbnails(id).each do |thumbnail|
- json.object do
- json.field "quality", thumbnail[:name]
- json.field "url", "#{thumbnail[:host]}/vi/#{id}/#{thumbnail["url"]}.jpg"
- json.field "width", thumbnail[:width]
- json.field "height", thumbnail[:height]
- end
- end
- end
-end
-
-def generate_storyboards(json, id, storyboards)
- json.array do
- storyboards.each do |storyboard|
- json.object do
- json.field "url", "/api/v1/storyboards/#{id}?width=#{storyboard[:width]}&height=#{storyboard[:height]}"
- json.field "templateUrl", storyboard[:url]
- json.field "width", storyboard[:width]
- json.field "height", storyboard[:height]
- json.field "count", storyboard[:count]
- json.field "interval", storyboard[:interval]
- json.field "storyboardWidth", storyboard[:storyboard_width]
- json.field "storyboardHeight", storyboard[:storyboard_height]
- json.field "storyboardCount", storyboard[:storyboard_count]
- end
- end
- end
-end
diff --git a/src/invidious/videos/caption.cr b/src/invidious/videos/caption.cr
new file mode 100644
index 00000000..c811cfe1
--- /dev/null
+++ b/src/invidious/videos/caption.cr
@@ -0,0 +1,224 @@
+require "json"
+
+module Invidious::Videos
+ module Captions
+ struct Metadata
+ property name : String
+ property language_code : String
+ property base_url : String
+
+ property auto_generated : Bool
+
+ def initialize(@name, @language_code, @base_url, @auto_generated)
+ end
+
+      # Parse the JSON structure from YouTube
+ def self.from_yt_json(container : JSON::Any) : Array(Captions::Metadata)
+ caption_tracks = container
+ .dig?("playerCaptionsTracklistRenderer", "captionTracks")
+ .try &.as_a
+
+ captions_list = [] of Captions::Metadata
+ return captions_list if caption_tracks.nil?
+
+ caption_tracks.each do |caption|
+ name = caption["name"]["simpleText"]? || caption["name"]["runs"][0]["text"]
+ name = name.to_s.split(" - ")[0]
+
+ language_code = caption["languageCode"].to_s
+ base_url = caption["baseUrl"].to_s
+
+ auto_generated = (caption["kind"]? == "asr")
+
+ captions_list << Captions::Metadata.new(name, language_code, base_url, auto_generated)
+ end
+
+ return captions_list
+ end
+
+ def timedtext_to_vtt(timedtext : String, tlang = nil) : String
+        # In the future, we could just work directly with the URL. This is more of a proof of concept
+ cues = [] of XML::Node
+ tree = XML.parse(timedtext)
+ tree = tree.children.first
+
+ tree.children.each do |item|
+ if item.name == "body"
+ item.children.each do |cue|
+ if cue.name == "p" && !(cue.children.size == 1 && cue.children[0].content == "\n")
+ cues << cue
+ end
+ end
+ break
+ end
+ end
+
+ settings_field = {
+ "Kind" => "captions",
+ "Language" => "#{tlang || @language_code}",
+ }
+
+ result = WebVTT.build(settings_field) do |vtt|
+ cues.each_with_index do |node, i|
+ start_time = node["t"].to_f.milliseconds
+
+ duration = node["d"]?.try &.to_f.milliseconds
+
+ duration ||= start_time
+
+ if cues.size > i + 1
+ end_time = cues[i + 1]["t"].to_f.milliseconds
+ else
+ end_time = start_time + duration
+ end
+
+ text = String.build do |io|
+ node.children.each do |s|
+ io << s.content
+ end
+ end
+
+ vtt.cue(start_time, end_time, text)
+ end
+ end
+
+ return result
+ end
+ end
+
+    # List of all caption languages available on YouTube.
+ LANGUAGES = {
+ "",
+ "English",
+ "English (auto-generated)",
+ "English (United Kingdom)",
+ "English (United States)",
+ "Afrikaans",
+ "Albanian",
+ "Amharic",
+ "Arabic",
+ "Armenian",
+ "Azerbaijani",
+ "Bangla",
+ "Basque",
+ "Belarusian",
+ "Bosnian",
+ "Bulgarian",
+ "Burmese",
+ "Cantonese (Hong Kong)",
+ "Catalan",
+ "Cebuano",
+ "Chinese",
+ "Chinese (China)",
+ "Chinese (Hong Kong)",
+ "Chinese (Simplified)",
+ "Chinese (Taiwan)",
+ "Chinese (Traditional)",
+ "Corsican",
+ "Croatian",
+ "Czech",
+ "Danish",
+ "Dutch",
+ "Dutch (auto-generated)",
+ "Esperanto",
+ "Estonian",
+ "Filipino",
+ "Filipino (auto-generated)",
+ "Finnish",
+ "French",
+ "French (auto-generated)",
+ "Galician",
+ "Georgian",
+ "German",
+ "German (auto-generated)",
+ "Greek",
+ "Gujarati",
+ "Haitian Creole",
+ "Hausa",
+ "Hawaiian",
+ "Hebrew",
+ "Hindi",
+ "Hmong",
+ "Hungarian",
+ "Icelandic",
+ "Igbo",
+ "Indonesian",
+ "Indonesian (auto-generated)",
+ "Interlingue",
+ "Irish",
+ "Italian",
+ "Italian (auto-generated)",
+ "Japanese",
+ "Japanese (auto-generated)",
+ "Javanese",
+ "Kannada",
+ "Kazakh",
+ "Khmer",
+ "Korean",
+ "Korean (auto-generated)",
+ "Kurdish",
+ "Kyrgyz",
+ "Lao",
+ "Latin",
+ "Latvian",
+ "Lithuanian",
+ "Luxembourgish",
+ "Macedonian",
+ "Malagasy",
+ "Malay",
+ "Malayalam",
+ "Maltese",
+ "Maori",
+ "Marathi",
+ "Mongolian",
+ "Nepali",
+ "Norwegian Bokmål",
+ "Nyanja",
+ "Pashto",
+ "Persian",
+ "Polish",
+ "Portuguese",
+ "Portuguese (auto-generated)",
+ "Portuguese (Brazil)",
+ "Punjabi",
+ "Romanian",
+ "Russian",
+ "Russian (auto-generated)",
+ "Samoan",
+ "Scottish Gaelic",
+ "Serbian",
+ "Shona",
+ "Sindhi",
+ "Sinhala",
+ "Slovak",
+ "Slovenian",
+ "Somali",
+ "Southern Sotho",
+ "Spanish",
+ "Spanish (auto-generated)",
+ "Spanish (Latin America)",
+ "Spanish (Mexico)",
+ "Spanish (Spain)",
+ "Sundanese",
+ "Swahili",
+ "Swedish",
+ "Tajik",
+ "Tamil",
+ "Telugu",
+ "Thai",
+ "Turkish",
+ "Turkish (auto-generated)",
+ "Ukrainian",
+ "Urdu",
+ "Uzbek",
+ "Vietnamese",
+ "Vietnamese (auto-generated)",
+ "Welsh",
+ "Western Frisian",
+ "Xhosa",
+ "Yiddish",
+ "Yoruba",
+ "Zulu",
+ }
+ end
+end
diff --git a/src/invidious/videos/clip.cr b/src/invidious/videos/clip.cr
new file mode 100644
index 00000000..29c57182
--- /dev/null
+++ b/src/invidious/videos/clip.cr
@@ -0,0 +1,22 @@
+require "json"
+
+# returns start_time, end_time and clip_title
+def parse_clip_parameters(params) : {Float64?, Float64?, String?}
+ decoded_protobuf = params.try { |i| URI.decode_www_form(i) }
+ .try { |i| Base64.decode(i) }
+ .try { |i| IO::Memory.new(i) }
+ .try { |i| Protodec::Any.parse(i) }
+
+ start_time = decoded_protobuf
+ .try(&.["50:0:embedded"]["2:1:varint"].as_i64)
+ .try { |i| i/1000 }
+
+ end_time = decoded_protobuf
+ .try(&.["50:0:embedded"]["3:2:varint"].as_i64)
+ .try { |i| i/1000 }
+
+ clip_title = decoded_protobuf
+ .try(&.["50:0:embedded"]["4:3:string"].as_s)
+
+ return start_time, end_time, clip_title
+end
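+
+# Usage sketch (the caller and the variable name below are illustrative
+# assumptions, not part of this file):
+#
+#   start_time, end_time, clip_title = parse_clip_parameters(clip_params)
+#
+# With a nil input, all three returned values are nil.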
diff --git a/src/invidious/videos/description.cr b/src/invidious/videos/description.cr
new file mode 100644
index 00000000..1371bebb
--- /dev/null
+++ b/src/invidious/videos/description.cr
@@ -0,0 +1,82 @@
+require "json"
+require "uri"
+
+private def copy_string(str : String::Builder, iter : Iterator, count : Int) : Int
+ copied = 0
+ while copied < count
+ cp = iter.next
+ break if cp.is_a?(Iterator::Stop)
+
+ if cp == 0x26 # Ampersand (&)
+ str << "&amp;"
+ elsif cp == 0x27 # Single quote (')
+ str << "&#39;"
+ elsif cp == 0x22 # Double quote (")
+ str << "&quot;"
+ elsif cp == 0x3C # Less-than (<)
+ str << "&lt;"
+ elsif cp == 0x3E # Greater than (>)
+ str << "&gt;"
+ else
+ str << cp.chr
+ end
+
+ # A codepoint from the SMP counts twice
+ copied += 1 if cp > 0xFFFF
+ copied += 1
+ end
+
+ return copied
+end
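+
+# For example, copying the five codepoints of "<a&b>" through the `copy_string`
+# helper above appends "&lt;a&amp;b&gt;" to the builder and returns 5:
+#
+#   escaped = String.build { |str| copy_string(str, "<a&b>".each_codepoint, 5) }
+#   # escaped == "&lt;a&amp;b&gt;"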
+
+def parse_description(desc, video_id : String) : String?
+ return "" if desc.nil?
+
+ content = desc["content"].as_s
+ return "" if content.empty?
+
+ commands = desc["commandRuns"]?.try &.as_a
+ if commands.nil?
+    # Slightly faster than HTML.escape, as we're only doing one pass over
+    # the string instead of the five passes made by the standard library
+ return String.build do |str|
+ copy_string(str, content.each_codepoint, content.size)
+ end
+ end
+
+  # Not everything is stored in UTF-8 on YouTube's side. The SMP codepoints
+  # (0x10000 and above) are encoded as UTF-16 surrogate pairs, which are
+  # automatically decoded by the JSON parser. This means that we need to count
+  # copied characters in a special manner, preventing the use of a regular string copy.
+ iter = content.each_codepoint
+
+ index = 0
+
+ return String.build do |str|
+ commands.each do |command|
+ cmd_start = command["startIndex"].as_i
+ cmd_length = command["length"].as_i
+
+ # Copy the text chunk between this command and the previous if needed.
+ length = cmd_start - index
+ index += copy_string(str, iter, length)
+
+ # We need to copy the command's text using the iterator
+ # and the special function defined above.
+ cmd_content = String.build(cmd_length) do |str2|
+ copy_string(str2, iter, cmd_length)
+ end
+
+ link = cmd_content
+ if on_tap = command.dig?("onTap", "innertubeCommand")
+ link = parse_link_endpoint(on_tap, cmd_content, video_id)
+ end
+ str << link
+ index += cmd_length
+ end
+
+ # Copy the end of the string (past the last command).
+ remaining_length = content.size - index
+ copy_string(str, iter, remaining_length) if remaining_length > 0
+ end
+end
diff --git a/src/invidious/videos/formats.cr b/src/invidious/videos/formats.cr
new file mode 100644
index 00000000..e98e7257
--- /dev/null
+++ b/src/invidious/videos/formats.cr
@@ -0,0 +1,116 @@
+module Invidious::Videos::Formats
+ def self.itag_to_metadata?(itag : JSON::Any)
+ return FORMATS[itag.to_s]?
+ end
+
+ # See https://github.com/rg3/youtube-dl/blob/master/youtube_dl/extractor/youtube.py#L380-#L476
+ private FORMATS = {
+ "5" => {"ext" => "flv", "width" => 400, "height" => 240, "acodec" => "mp3", "abr" => 64, "vcodec" => "h263"},
+ "6" => {"ext" => "flv", "width" => 450, "height" => 270, "acodec" => "mp3", "abr" => 64, "vcodec" => "h263"},
+ "13" => {"ext" => "3gp", "acodec" => "aac", "vcodec" => "mp4v"},
+ "17" => {"ext" => "3gp", "width" => 176, "height" => 144, "acodec" => "aac", "abr" => 24, "vcodec" => "mp4v"},
+ "18" => {"ext" => "mp4", "width" => 640, "height" => 360, "acodec" => "aac", "abr" => 96, "vcodec" => "h264"},
+ "22" => {"ext" => "mp4", "width" => 1280, "height" => 720, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
+ "34" => {"ext" => "flv", "width" => 640, "height" => 360, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+ "35" => {"ext" => "flv", "width" => 854, "height" => 480, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+
+ "36" => {"ext" => "3gp", "width" => 320, "acodec" => "aac", "vcodec" => "mp4v"},
+ "37" => {"ext" => "mp4", "width" => 1920, "height" => 1080, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
+ "38" => {"ext" => "mp4", "width" => 4096, "height" => 3072, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
+ "43" => {"ext" => "webm", "width" => 640, "height" => 360, "acodec" => "vorbis", "abr" => 128, "vcodec" => "vp8"},
+ "44" => {"ext" => "webm", "width" => 854, "height" => 480, "acodec" => "vorbis", "abr" => 128, "vcodec" => "vp8"},
+ "45" => {"ext" => "webm", "width" => 1280, "height" => 720, "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
+ "46" => {"ext" => "webm", "width" => 1920, "height" => 1080, "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
+ "59" => {"ext" => "mp4", "width" => 854, "height" => 480, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+ "78" => {"ext" => "mp4", "width" => 854, "height" => 480, "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+
+ # 3D videos
+ "82" => {"ext" => "mp4", "height" => 360, "format" => "3D", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+ "83" => {"ext" => "mp4", "height" => 480, "format" => "3D", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+ "84" => {"ext" => "mp4", "height" => 720, "format" => "3D", "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
+ "85" => {"ext" => "mp4", "height" => 1080, "format" => "3D", "acodec" => "aac", "abr" => 192, "vcodec" => "h264"},
+ "100" => {"ext" => "webm", "height" => 360, "format" => "3D", "acodec" => "vorbis", "abr" => 128, "vcodec" => "vp8"},
+ "101" => {"ext" => "webm", "height" => 480, "format" => "3D", "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
+ "102" => {"ext" => "webm", "height" => 720, "format" => "3D", "acodec" => "vorbis", "abr" => 192, "vcodec" => "vp8"},
+
+ # Apple HTTP Live Streaming
+ "91" => {"ext" => "mp4", "height" => 144, "format" => "HLS", "acodec" => "aac", "abr" => 48, "vcodec" => "h264"},
+ "92" => {"ext" => "mp4", "height" => 240, "format" => "HLS", "acodec" => "aac", "abr" => 48, "vcodec" => "h264"},
+ "93" => {"ext" => "mp4", "height" => 360, "format" => "HLS", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+ "94" => {"ext" => "mp4", "height" => 480, "format" => "HLS", "acodec" => "aac", "abr" => 128, "vcodec" => "h264"},
+ "95" => {"ext" => "mp4", "height" => 720, "format" => "HLS", "acodec" => "aac", "abr" => 256, "vcodec" => "h264"},
+ "96" => {"ext" => "mp4", "height" => 1080, "format" => "HLS", "acodec" => "aac", "abr" => 256, "vcodec" => "h264"},
+ "132" => {"ext" => "mp4", "height" => 240, "format" => "HLS", "acodec" => "aac", "abr" => 48, "vcodec" => "h264"},
+ "151" => {"ext" => "mp4", "height" => 72, "format" => "HLS", "acodec" => "aac", "abr" => 24, "vcodec" => "h264"},
+
+ # DASH mp4 video
+ "133" => {"ext" => "mp4", "height" => 240, "format" => "DASH video", "vcodec" => "h264"},
+ "134" => {"ext" => "mp4", "height" => 360, "format" => "DASH video", "vcodec" => "h264"},
+ "135" => {"ext" => "mp4", "height" => 480, "format" => "DASH video", "vcodec" => "h264"},
+ "136" => {"ext" => "mp4", "height" => 720, "format" => "DASH video", "vcodec" => "h264"},
+ "137" => {"ext" => "mp4", "height" => 1080, "format" => "DASH video", "vcodec" => "h264"},
+ "138" => {"ext" => "mp4", "format" => "DASH video", "vcodec" => "h264"}, # Height can vary (https://github.com/ytdl-org/youtube-dl/issues/4559)
+ "160" => {"ext" => "mp4", "height" => 144, "format" => "DASH video", "vcodec" => "h264"},
+ "212" => {"ext" => "mp4", "height" => 480, "format" => "DASH video", "vcodec" => "h264"},
+ "264" => {"ext" => "mp4", "height" => 1440, "format" => "DASH video", "vcodec" => "h264"},
+ "298" => {"ext" => "mp4", "height" => 720, "format" => "DASH video", "vcodec" => "h264", "fps" => 60},
+ "299" => {"ext" => "mp4", "height" => 1080, "format" => "DASH video", "vcodec" => "h264", "fps" => 60},
+ "266" => {"ext" => "mp4", "height" => 2160, "format" => "DASH video", "vcodec" => "h264"},
+
+ # Dash mp4 audio
+ "139" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "abr" => 48, "container" => "m4a_dash"},
+ "140" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "abr" => 128, "container" => "m4a_dash"},
+ "141" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "abr" => 256, "container" => "m4a_dash"},
+ "256" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "container" => "m4a_dash"},
+ "258" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "aac", "container" => "m4a_dash"},
+ "325" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "dtse", "container" => "m4a_dash"},
+ "328" => {"ext" => "m4a", "format" => "DASH audio", "acodec" => "ec-3", "container" => "m4a_dash"},
+
+ # Dash webm
+ "167" => {"ext" => "webm", "height" => 360, "width" => 640, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
+ "168" => {"ext" => "webm", "height" => 480, "width" => 854, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
+ "169" => {"ext" => "webm", "height" => 720, "width" => 1280, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
+ "170" => {"ext" => "webm", "height" => 1080, "width" => 1920, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
+ "218" => {"ext" => "webm", "height" => 480, "width" => 854, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
+ "219" => {"ext" => "webm", "height" => 480, "width" => 854, "format" => "DASH video", "container" => "webm", "vcodec" => "vp8"},
+ "278" => {"ext" => "webm", "height" => 144, "format" => "DASH video", "container" => "webm", "vcodec" => "vp9"},
+ "242" => {"ext" => "webm", "height" => 240, "format" => "DASH video", "vcodec" => "vp9"},
+ "243" => {"ext" => "webm", "height" => 360, "format" => "DASH video", "vcodec" => "vp9"},
+ "244" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9"},
+ "245" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9"},
+ "246" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9"},
+ "247" => {"ext" => "webm", "height" => 720, "format" => "DASH video", "vcodec" => "vp9"},
+ "248" => {"ext" => "webm", "height" => 1080, "format" => "DASH video", "vcodec" => "vp9"},
+ "271" => {"ext" => "webm", "height" => 1440, "format" => "DASH video", "vcodec" => "vp9"},
+ # itag 272 videos are either 3840x2160 (e.g. RtoitU2A-3E) or 7680x4320 (sLprVF6d7Ug)
+ "272" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9"},
+ "302" => {"ext" => "webm", "height" => 720, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "303" => {"ext" => "webm", "height" => 1080, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "308" => {"ext" => "webm", "height" => 1440, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "313" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9"},
+ "315" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "330" => {"ext" => "webm", "height" => 144, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "331" => {"ext" => "webm", "height" => 240, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "332" => {"ext" => "webm", "height" => 360, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "333" => {"ext" => "webm", "height" => 480, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "334" => {"ext" => "webm", "height" => 720, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "335" => {"ext" => "webm", "height" => 1080, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "336" => {"ext" => "webm", "height" => 1440, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+ "337" => {"ext" => "webm", "height" => 2160, "format" => "DASH video", "vcodec" => "vp9", "fps" => 60},
+
+ # Dash webm audio
+ "171" => {"ext" => "webm", "acodec" => "vorbis", "format" => "DASH audio", "abr" => 128},
+ "172" => {"ext" => "webm", "acodec" => "vorbis", "format" => "DASH audio", "abr" => 256},
+
+ # Dash webm audio with opus inside
+ "249" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 50},
+ "250" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 70},
+ "251" => {"ext" => "webm", "format" => "DASH audio", "acodec" => "opus", "abr" => 160},
+
+ # av01 video only formats sometimes served with "unknown" codecs
+ "394" => {"ext" => "mp4", "height" => 144, "vcodec" => "av01.0.05M.08"},
+ "395" => {"ext" => "mp4", "height" => 240, "vcodec" => "av01.0.05M.08"},
+ "396" => {"ext" => "mp4", "height" => 360, "vcodec" => "av01.0.05M.08"},
+ "397" => {"ext" => "mp4", "height" => 480, "vcodec" => "av01.0.05M.08"},
+ }
+end
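+
+# Usage sketch: looking up the itag exactly as it appears in a format's JSON,
+#
+#   Invidious::Videos::Formats.itag_to_metadata?(JSON::Any.new("22"))
+#   # => {"ext" => "mp4", "width" => 1280, "height" => 720, "acodec" => "aac", "abr" => 192, "vcodec" => "h264"}
+#
+# while an unknown itag yields nil.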
diff --git a/src/invidious/videos/music.cr b/src/invidious/videos/music.cr
new file mode 100644
index 00000000..08d88a3e
--- /dev/null
+++ b/src/invidious/videos/music.cr
@@ -0,0 +1,13 @@
+require "json"
+
+struct VideoMusic
+ include JSON::Serializable
+
+ property song : String
+ property album : String
+ property artist : String
+ property license : String
+
+ def initialize(@song : String, @album : String, @artist : String, @license : String)
+ end
+end
diff --git a/src/invidious/videos/parser.cr b/src/invidious/videos/parser.cr
new file mode 100644
index 00000000..915c9baf
--- /dev/null
+++ b/src/invidious/videos/parser.cr
@@ -0,0 +1,489 @@
+require "json"
+
+# Used to parse both "compactVideoRenderer" and "endScreenVideoRenderer".
+# The former is preferred, as it contains more videos; the latter has
+# the same first 11 entries as the compact renderer.
+#
+# TODO: "compactRadioRenderer" (Mix) and
+# TODO: Use a proper struct/class instead of a hacky JSON object
+def parse_related_video(related : JSON::Any) : Hash(String, JSON::Any)?
+ return nil if !related["videoId"]?
+
+  # The compact renderer has the video length in seconds, whereas the end
+  # screen renderer has a full text version ("42:40")
+ length = related["lengthInSeconds"]?.try &.as_i.to_s
+ length ||= related.dig?("lengthText", "simpleText").try do |box|
+ decode_length_seconds(box.as_s).to_s
+ end
+
+ # Both have "short", so the "long" option shouldn't be required
+ channel_info = (related["shortBylineText"]? || related["longBylineText"]?)
+ .try &.dig?("runs", 0)
+
+ author = channel_info.try &.dig?("text")
+ author_verified = has_verified_badge?(related["ownerBadges"]?).to_s
+
+ ucid = channel_info.try { |ci| HelperExtractors.get_browse_id(ci) }
+
+ # "4,088,033 views", only available on compact renderer
+ # and when video is not a livestream
+ view_count = related.dig?("viewCountText", "simpleText")
+ .try &.as_s.gsub(/\D/, "")
+
+ short_view_count = related.try do |r|
+ HelperExtractors.get_short_view_count(r).to_s
+ end
+
+ LOGGER.trace("parse_related_video: Found \"watchNextEndScreenRenderer\" container")
+
+ # TODO: when refactoring video types, make a struct for related videos
+ # or reuse an existing type, if that fits.
+ return {
+ "id" => related["videoId"],
+ "title" => related["title"]["simpleText"],
+ "author" => author || JSON::Any.new(""),
+ "ucid" => JSON::Any.new(ucid || ""),
+ "length_seconds" => JSON::Any.new(length || "0"),
+ "view_count" => JSON::Any.new(view_count || "0"),
+ "short_view_count" => JSON::Any.new(short_view_count || "0"),
+ "author_verified" => JSON::Any.new(author_verified),
+ }
+end
+
+def extract_video_info(video_id : String)
+ # Init client config for the API
+ client_config = YoutubeAPI::ClientConfig.new
+
+ # Fetch data from the player endpoint
+ player_response = YoutubeAPI.player(video_id: video_id, params: "2AMB", client_config: client_config)
+
+ playability_status = player_response.dig?("playabilityStatus", "status").try &.as_s
+
+ if playability_status != "OK"
+ subreason = player_response.dig?("playabilityStatus", "errorScreen", "playerErrorMessageRenderer", "subreason")
+ reason = subreason.try &.[]?("simpleText").try &.as_s
+ reason ||= subreason.try &.[]("runs").as_a.map(&.[]("text")).join("")
+ reason ||= player_response.dig("playabilityStatus", "reason").as_s
+
+    # Stop here if the video is not a scheduled livestream, or for
+    # LOGIN_REQUIRED when no videoDetails element is found, because retrying won't help
+ if !{"LIVE_STREAM_OFFLINE", "LOGIN_REQUIRED"}.any?(playability_status) ||
+ playability_status == "LOGIN_REQUIRED" && !player_response.dig?("videoDetails")
+ return {
+ "version" => JSON::Any.new(Video::SCHEMA_VERSION.to_i64),
+ "reason" => JSON::Any.new(reason),
+ }
+ end
+ elsif video_id != player_response.dig("videoDetails", "videoId")
+ # YouTube may return a different video player response than expected.
+ # See: https://github.com/TeamNewPipe/NewPipe/issues/8713
+ # Line to be reverted if one day we solve the video not available issue.
+
+ # Although technically not a call to /videoplayback the fact that YouTube is returning the
+ # wrong video means that we should count it as a failure.
+ get_playback_statistic()["totalRequests"] += 1
+
+ return {
+ "version" => JSON::Any.new(Video::SCHEMA_VERSION.to_i64),
+ "reason" => JSON::Any.new("Can't load the video on this Invidious instance. YouTube is currently trying to block Invidious instances. <a href=\"https://github.com/iv-org/invidious/issues/3822\">Click here for more info about the issue.</a>"),
+ }
+ else
+ reason = nil
+ end
+
+ # Don't fetch the next endpoint if the video is unavailable.
+ if {"OK", "LIVE_STREAM_OFFLINE", "LOGIN_REQUIRED"}.any?(playability_status)
+ next_response = YoutubeAPI.next({"videoId": video_id, "params": ""})
+ player_response = player_response.merge(next_response)
+ end
+
+ params = parse_video_info(video_id, player_response)
+ params["reason"] = JSON::Any.new(reason) if reason
+
+ new_player_response = nil
+
+ # Don't use Android test suite client if po_token is passed because po_token doesn't
+ # work for Android test suite client.
+ if reason.nil? && CONFIG.po_token.nil?
+ # Fetch the video streams using an Android client in order to get the
+ # decrypted URLs and maybe fix throttling issues (#2194). See the
+ # following issue for an explanation about decrypted URLs:
+ # https://github.com/TeamNewPipe/NewPipeExtractor/issues/562
+ client_config.client_type = YoutubeAPI::ClientType::AndroidTestSuite
+ new_player_response = try_fetch_streaming_data(video_id, client_config)
+ end
+
+ # Replace player response and reset reason
+ if !new_player_response.nil?
+ # Preserve captions & storyboard data before replacement
+ new_player_response["storyboards"] = player_response["storyboards"] if player_response["storyboards"]?
+ new_player_response["captions"] = player_response["captions"] if player_response["captions"]?
+
+ player_response = new_player_response
+ params.delete("reason")
+ end
+
+ {"captions", "playabilityStatus", "playerConfig", "storyboards"}.each do |f|
+ params[f] = player_response[f] if player_response[f]?
+ end
+
+ # Convert URLs, if those are present
+ if streaming_data = player_response["streamingData"]?
+ %w[formats adaptiveFormats].each do |key|
+ streaming_data.as_h[key]?.try &.as_a.each do |format|
+ format.as_h["url"] = JSON::Any.new(convert_url(format))
+ end
+ end
+
+ params["streamingData"] = streaming_data
+ end
+
+ # Data structure version, for cache control
+ params["version"] = JSON::Any.new(Video::SCHEMA_VERSION.to_i64)
+
+ return params
+end
+
+def try_fetch_streaming_data(id : String, client_config : YoutubeAPI::ClientConfig) : Hash(String, JSON::Any)?
+ LOGGER.debug("try_fetch_streaming_data: [#{id}] Using #{client_config.client_type} client.")
+ response = YoutubeAPI.player(video_id: id, params: "2AMB", client_config: client_config)
+
+ playability_status = response["playabilityStatus"]["status"]
+ LOGGER.debug("try_fetch_streaming_data: [#{id}] Got playabilityStatus == #{playability_status}.")
+
+ if id != response.dig("videoDetails", "videoId")
+ # YouTube may return a different video player response than expected.
+ # See: https://github.com/TeamNewPipe/NewPipe/issues/8713
+ raise InfoException.new(
+ "The video returned by YouTube isn't the requested one. (#{client_config.client_type} client)"
+ )
+ elsif playability_status == "OK"
+ return response
+ else
+ return nil
+ end
+end
+
+def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any)) : Hash(String, JSON::Any)
+ # Top level elements
+
+ main_results = player_response.dig?("contents", "twoColumnWatchNextResults")
+
+ raise BrokenTubeException.new("twoColumnWatchNextResults") if !main_results
+
+ # Primary results are not available on Music videos
+ # See: https://github.com/iv-org/invidious/pull/3238#issuecomment-1207193725
+ if primary_results = main_results.dig?("results", "results", "contents")
+ video_primary_renderer = primary_results
+ .as_a.find(&.["videoPrimaryInfoRenderer"]?)
+ .try &.["videoPrimaryInfoRenderer"]
+
+ video_secondary_renderer = primary_results
+ .as_a.find(&.["videoSecondaryInfoRenderer"]?)
+ .try &.["videoSecondaryInfoRenderer"]
+
+ raise BrokenTubeException.new("videoPrimaryInfoRenderer") if !video_primary_renderer
+ raise BrokenTubeException.new("videoSecondaryInfoRenderer") if !video_secondary_renderer
+ end
+
+ video_details = player_response.dig?("videoDetails")
+ if !(microformat = player_response.dig?("microformat", "playerMicroformatRenderer"))
+ microformat = {} of String => JSON::Any
+ end
+
+ raise BrokenTubeException.new("videoDetails") if !video_details
+
+ # Basic video infos
+
+ title = video_details["title"]?.try &.as_s
+
+  # We have to try to extract viewCount from videoPrimaryInfoRenderer first,
+  # then from videoDetails, as the latter is "0" for livestreams (we want
+  # to get the number of viewers currently watching).
+ views_txt = extract_text(
+ video_primary_renderer
+ .try &.dig?("viewCount", "videoViewCountRenderer", "viewCount")
+ )
+ views_txt ||= video_details["viewCount"]?.try &.as_s || ""
+ views = views_txt.gsub(/\D/, "").to_i64?
+
+ length_txt = (microformat["lengthSeconds"]? || video_details["lengthSeconds"])
+ .try &.as_s.to_i64
+
+ published = microformat["publishDate"]?
+ .try { |t| Time.parse(t.as_s, "%Y-%m-%d", Time::Location::UTC) } || Time.utc
+
+ premiere_timestamp = microformat.dig?("liveBroadcastDetails", "startTimestamp")
+ .try { |t| Time.parse_rfc3339(t.as_s) }
+
+ premiere_timestamp ||= player_response.dig?(
+ "playabilityStatus", "liveStreamability",
+ "liveStreamabilityRenderer", "offlineSlate",
+ "liveStreamOfflineSlateRenderer", "scheduledStartTime"
+ )
+ .try &.as_s.to_i64
+ .try { |t| Time.unix(t) }
+
+ live_now = microformat.dig?("liveBroadcastDetails", "isLiveNow")
+ .try &.as_bool
+ live_now ||= video_details.dig?("isLive").try &.as_bool || false
+
+ post_live_dvr = video_details.dig?("isPostLiveDvr")
+ .try &.as_bool || false
+
+ # Extra video infos
+
+ allowed_regions = microformat["availableCountries"]?
+ .try &.as_a.map &.as_s || [] of String
+
+ allow_ratings = video_details["allowRatings"]?.try &.as_bool
+ family_friendly = microformat["isFamilySafe"]?.try &.as_bool
+ is_listed = video_details["isCrawlable"]?.try &.as_bool
+ is_upcoming = video_details["isUpcoming"]?.try &.as_bool
+
+ keywords = video_details["keywords"]?
+ .try &.as_a.map &.as_s || [] of String
+
+ # Related videos
+
+ LOGGER.debug("extract_video_info: parsing related videos...")
+
+ related = [] of JSON::Any
+
+ # Parse "compactVideoRenderer" items (under secondary results)
+ secondary_results = main_results
+ .dig?("secondaryResults", "secondaryResults", "results")
+ secondary_results.try &.as_a.each do |element|
+ if item = element["compactVideoRenderer"]?
+ related_video = parse_related_video(item)
+ related << JSON::Any.new(related_video) if related_video
+ end
+ end
+
+ # If nothing was found previously, fall back to end screen renderer
+ if related.empty?
+ # Container for "endScreenVideoRenderer" items
+ player_overlays = player_response.dig?(
+ "playerOverlays", "playerOverlayRenderer",
+ "endScreen", "watchNextEndScreenRenderer", "results"
+ )
+
+ player_overlays.try &.as_a.each do |element|
+ if item = element["endScreenVideoRenderer"]?
+ related_video = parse_related_video(item)
+ related << JSON::Any.new(related_video) if related_video
+ end
+ end
+ end
+
+ # Likes
+
+ toplevel_buttons = video_primary_renderer
+ .try &.dig?("videoActions", "menuRenderer", "topLevelButtons")
+
+ if toplevel_buttons
+    # New format as of December 2023
+ likes_button = toplevel_buttons.dig?(0,
+ "segmentedLikeDislikeButtonViewModel",
+ "likeButtonViewModel",
+ "likeButtonViewModel",
+ "toggleButtonViewModel",
+ "toggleButtonViewModel",
+ "defaultButtonViewModel",
+ "buttonViewModel"
+ )
+
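+    # Fallback: like button exposed as a plain "toggleButtonRenderer",
+    # identified by its "LIKE" icon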
+ likes_button ||= toplevel_buttons.try &.as_a
+ .find(&.dig?("toggleButtonRenderer", "defaultIcon", "iconType").=== "LIKE")
+ .try &.["toggleButtonRenderer"]
+
+    # New format as of September 2022
+ likes_button ||= toplevel_buttons.try &.as_a
+ .find(&.["segmentedLikeDislikeButtonRenderer"]?)
+ .try &.dig?(
+ "segmentedLikeDislikeButtonRenderer",
+ "likeButton", "toggleButtonRenderer"
+ )
+
+ if likes_button
+ likes_txt = likes_button.dig?("accessibilityText")
+      # Note: the like count from `toggledText` is off by one, as it shows
+      # the new like count that would result from clicking "like".
+ likes_txt ||= (likes_button["defaultText"]? || likes_button["toggledText"]?)
+ .try &.dig?("accessibility", "accessibilityData", "label")
+ likes = likes_txt.as_s.gsub(/\D/, "").to_i64? if likes_txt
+
+ LOGGER.trace("extract_video_info: Found \"likes\" button. Button text is \"#{likes_txt}\"")
+ LOGGER.debug("extract_video_info: Likes count is #{likes}") if likes
+ end
+ end
+
+ # Description
+
+ description = microformat.dig?("description", "simpleText").try &.as_s || ""
+ short_description = player_response.dig?("videoDetails", "shortDescription")
+
+ # description_html = video_secondary_renderer.try &.dig?("description", "runs")
+ # .try &.as_a.try { |t| content_to_comment_html(t, video_id) }
+
+ description_html = parse_description(video_secondary_renderer.try &.dig?("attributedDescription"), video_id)
+
+ # Video metadata
+
+ metadata = video_secondary_renderer
+ .try &.dig?("metadataRowContainer", "metadataRowContainerRenderer", "rows")
+ .try &.as_a
+
+ genre = microformat["category"]?
+ genre_ucid = nil
+ license = nil
+
+ metadata.try &.each do |row|
+ metadata_title = extract_text(row.dig?("metadataRowRenderer", "title"))
+ contents = row.dig?("metadataRowRenderer", "contents", 0)
+
+ if metadata_title == "Category"
+ contents = contents.try &.dig?("runs", 0)
+
+ genre = contents.try &.["text"]?
+ genre_ucid = contents.try &.dig?("navigationEndpoint", "browseEndpoint", "browseId")
+ elsif metadata_title == "License"
+ license = contents.try &.dig?("runs", 0, "text")
+ elsif metadata_title == "Licensed to YouTube by"
+ license = contents.try &.["simpleText"]?
+ end
+ end
+
+ # Music section
+
+ music_list = [] of VideoMusic
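+  # The "Music" carousel lives inside the structured description engagement
+  # panel; note the fixed indices in the path below.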
+ music_desclist = player_response.dig?(
+ "engagementPanels", 1, "engagementPanelSectionListRenderer",
+ "content", "structuredDescriptionContentRenderer", "items", 2,
+ "videoDescriptionMusicSectionRenderer", "carouselLockups"
+ )
+
+ music_desclist.try &.as_a.each do |music_desc|
+ artist = nil
+ album = nil
+ music_license = nil
+
+ # Used when the video has multiple songs
+ if song_title = music_desc.dig?("carouselLockupRenderer", "videoLockup", "compactVideoRenderer", "title")
+ # "simpleText" for plain text / "runs" when song has a link
+ song = song_title["simpleText"]? || song_title.dig?("runs", 0, "text")
+
+ # some videos can have empty tracks. See: https://www.youtube.com/watch?v=eBGIQ7ZuuiU
+ next if !song
+ end
+
+ music_desc.dig?("carouselLockupRenderer", "infoRows").try &.as_a.each do |desc|
+ desc_title = extract_text(desc.dig?("infoRowRenderer", "title"))
+ if desc_title == "ARTIST"
+ artist = extract_text(desc.dig?("infoRowRenderer", "defaultMetadata"))
+ elsif desc_title == "SONG"
+ song = extract_text(desc.dig?("infoRowRenderer", "defaultMetadata"))
+ elsif desc_title == "ALBUM"
+ album = extract_text(desc.dig?("infoRowRenderer", "defaultMetadata"))
+ elsif desc_title == "LICENSES"
+ music_license = extract_text(desc.dig?("infoRowRenderer", "expandedMetadata"))
+ end
+ end
+ music_list << VideoMusic.new(song.to_s, album.to_s, artist.to_s, music_license.to_s)
+ end
+
+ # Author infos
+
+ author = video_details["author"]?.try &.as_s
+ ucid = video_details["channelId"]?.try &.as_s
+
+ if author_info = video_secondary_renderer.try &.dig?("owner", "videoOwnerRenderer")
+ author_thumbnail = author_info.dig?("thumbnail", "thumbnails", 0, "url")
+ author_verified = has_verified_badge?(author_info["badges"]?)
+
+ subs_text = author_info["subscriberCountText"]?
+ .try { |t| t["simpleText"]? || t.dig?("runs", 0, "text") }
+ .try &.as_s.split(" ", 2)[0]
+ end
+
+ # Return data
+
+ if live_now
+ video_type = VideoType::Livestream
+ elsif !premiere_timestamp.nil?
+ video_type = VideoType::Scheduled
+ published = premiere_timestamp || Time.utc
+ else
+ video_type = VideoType::Video
+ end
+
+ params = {
+ "videoType" => JSON::Any.new(video_type.to_s),
+ # Basic video infos
+ "title" => JSON::Any.new(title || ""),
+ "views" => JSON::Any.new(views || 0_i64),
+ "likes" => JSON::Any.new(likes || 0_i64),
+ "lengthSeconds" => JSON::Any.new(length_txt || 0_i64),
+ "published" => JSON::Any.new(published.to_rfc3339),
+ # Extra video infos
+ "allowedRegions" => JSON::Any.new(allowed_regions.map { |v| JSON::Any.new(v) }),
+ "allowRatings" => JSON::Any.new(allow_ratings || false),
+ "isFamilyFriendly" => JSON::Any.new(family_friendly || false),
+ "isListed" => JSON::Any.new(is_listed || false),
+ "isUpcoming" => JSON::Any.new(is_upcoming || false),
+ "keywords" => JSON::Any.new(keywords.map { |v| JSON::Any.new(v) }),
+ "isPostLiveDvr" => JSON::Any.new(post_live_dvr),
+ # Related videos
+ "relatedVideos" => JSON::Any.new(related),
+ # Description
+ "description" => JSON::Any.new(description || ""),
+ "descriptionHtml" => JSON::Any.new(description_html || "<p></p>"),
+ "shortDescription" => JSON::Any.new(short_description.try &.as_s || nil),
+ # Video metadata
+ "genre" => JSON::Any.new(genre.try &.as_s || ""),
+ "genreUcid" => JSON::Any.new(genre_ucid.try &.as_s?),
+ "license" => JSON::Any.new(license.try &.as_s || ""),
+ # Music section
+ "music" => JSON.parse(music_list.to_json),
+ # Author infos
+ "author" => JSON::Any.new(author || ""),
+ "ucid" => JSON::Any.new(ucid || ""),
+ "authorThumbnail" => JSON::Any.new(author_thumbnail.try &.as_s || ""),
+ "authorVerified" => JSON::Any.new(author_verified || false),
+ "subCountText" => JSON::Any.new(subs_text || "-"),
+ }
+
+ return params
+end
+
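+# Convert a format's plain "url" or "signatureCipher" into a usable URL:
+# decode the "s" signature and the "n" parameter through DECRYPT_FUNCTION
+# (when available) and append the configured PO token, if any.
+# Returns an empty string if anything goes wrong.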
+private def convert_url(fmt)
+ if cfr = fmt["signatureCipher"]?.try { |json| HTTP::Params.parse(json.as_s) }
+ sp = cfr["sp"]
+ url = URI.parse(cfr["url"])
+ params = url.query_params
+
+ LOGGER.debug("convert_url: Decoding '#{cfr}'")
+
+ unsig = DECRYPT_FUNCTION.try &.decrypt_signature(cfr["s"])
+ params[sp] = unsig if unsig
+ else
+ url = URI.parse(fmt["url"].as_s)
+ params = url.query_params
+ end
+
+ n = DECRYPT_FUNCTION.try &.decrypt_nsig(params["n"])
+ params["n"] = n if n
+
+ if token = CONFIG.po_token
+ params["pot"] = token
+ end
+
+ url.query_params = params
+ LOGGER.trace("convert_url: new url is '#{url}'")
+
+ return url.to_s
+rescue ex
+ LOGGER.debug("convert_url: Error when parsing video URL")
+ LOGGER.trace(ex.inspect_with_backtrace)
+ return ""
+end
diff --git a/src/invidious/videos/regions.cr b/src/invidious/videos/regions.cr
new file mode 100644
index 00000000..575f8c25
--- /dev/null
+++ b/src/invidious/videos/regions.cr
@@ -0,0 +1,27 @@
+# List of geographical regions that YouTube recognizes.
+# This is used to determine whether a video is restricted to a list of
+# allowed regions (= whitelisted) or can't be watched in a given set of
+# regions (= blacklisted).
+REGIONS = {
+ "AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR", "AS", "AT",
+ "AU", "AW", "AX", "AZ", "BA", "BB", "BD", "BE", "BF", "BG", "BH", "BI",
+ "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV", "BW", "BY",
+ "BZ", "CA", "CC", "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN",
+ "CO", "CR", "CU", "CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM",
+ "DO", "DZ", "EC", "EE", "EG", "EH", "ER", "ES", "ET", "FI", "FJ", "FK",
+ "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL",
+ "GM", "GN", "GP", "GQ", "GR", "GS", "GT", "GU", "GW", "GY", "HK", "HM",
+ "HN", "HR", "HT", "HU", "ID", "IE", "IL", "IM", "IN", "IO", "IQ", "IR",
+ "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM", "KN",
+ "KP", "KR", "KW", "KY", "KZ", "LA", "LB", "LC", "LI", "LK", "LR", "LS",
+ "LT", "LU", "LV", "LY", "MA", "MC", "MD", "ME", "MF", "MG", "MH", "MK",
+ "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT", "MU", "MV", "MW",
+ "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP",
+ "NR", "NU", "NZ", "OM", "PA", "PE", "PF", "PG", "PH", "PK", "PL", "PM",
+ "PN", "PR", "PS", "PT", "PW", "PY", "QA", "RE", "RO", "RS", "RU", "RW",
+ "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM",
+ "SN", "SO", "SR", "SS", "ST", "SV", "SX", "SY", "SZ", "TC", "TD", "TF",
+ "TG", "TH", "TJ", "TK", "TL", "TM", "TN", "TO", "TR", "TT", "TV", "TW",
+ "TZ", "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE", "VG", "VI",
+ "VN", "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW",
+}
diff --git a/src/invidious/videos/storyboard.cr b/src/invidious/videos/storyboard.cr
new file mode 100644
index 00000000..a72c2f55
--- /dev/null
+++ b/src/invidious/videos/storyboard.cr
@@ -0,0 +1,122 @@
+require "uri"
+require "http/params"
+
+module Invidious::Videos
+ struct Storyboard
+ # Template URL
+ getter url : URI
+ getter proxied_url : URI
+
+ # Thumbnail parameters
+ getter width : Int32
+ getter height : Int32
+ getter count : Int32
+ getter interval : Int32
+
+ # Image (storyboard) parameters
+ getter rows : Int32
+ getter columns : Int32
+ getter images_count : Int32
+
+ def initialize(
+ *, @url, @width, @height, @count, @interval,
+ @rows, @columns, @images_count
+ )
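+      # Build the equivalent URL on this instance: the path is prefixed
+      # with "/sb/<authority>/" to preserve the original i*.ytimg.com subdomain.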
+ authority = /(i\d?).ytimg.com/.match!(@url.host.not_nil!)[1]?
+
+ @proxied_url = URI.parse(HOST_URL)
+ @proxied_url.path = "/sb/#{authority}/#{@url.path.lchop("/sb/")}"
+ @proxied_url.query = @url.query
+ end
+
+    # Parse the JSON structure from YouTube
+ def self.from_yt_json(container : JSON::Any, length_seconds : Int32) : Array(Storyboard)
+ # Livestream storyboards are a bit different
+ # TODO: document exactly how
+ if storyboard = container.dig?("playerLiveStoryboardSpecRenderer", "spec").try &.as_s
+ return [Storyboard.new(
+ url: URI.parse(storyboard.split("#")[0]),
+ width: 106,
+ height: 60,
+ count: -1,
+ interval: 5000,
+ rows: 3,
+ columns: 3,
+ images_count: -1
+ )]
+ end
+
+ # Split the storyboard string into chunks
+ #
+      # General format (whitespace added for legibility):
+ # https://i.ytimg.com/sb/<video_id>/storyboard3_L$L/$N.jpg?sqp=<sig0>
+ # | 48 # 27 # 100 # 10 # 10 # 0 # default # rs$<sig1>
+ # | 80 # 45 # 95 # 10 # 10 # 10000 # M$M # rs$<sig2>
+ # | 160 # 90 # 95 # 5 # 5 # 10000 # M$M # rs$<sig3>
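+      # For example, "80#45#95#10#10#10000#M$M#rs$<sig2>" describes 95
+      # thumbnails of 80x45 pixels, packed 10 columns by 10 rows per image,
+      # one thumbnail every 10000 ms, named "M$M" and signed with "rs$<sig2>".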
+ #
+ storyboards = container.dig?("playerStoryboardSpecRenderer", "spec")
+ .try &.as_s.split("|")
+
+ return [] of Storyboard if !storyboards
+
+ # The base URL is the first chunk
+ base_url = URI.parse(storyboards.shift)
+
+ return storyboards.map_with_index do |sb, i|
+ # Separate the different storyboard parameters:
+ # width/height: respective dimensions, in pixels, of a single thumbnail
+ # count: how many thumbnails are displayed across the full video
+        # columns/rows: maximum number of thumbnails that can be packed into a
+        # single image, horizontally and vertically.
+ # interval: interval between two thumbnails, in milliseconds
+ # name: storyboard filename. Usually "M$M" or "default"
+ # sigh: URL cryptographic signature
+ width, height, count, columns, rows, interval, name, sigh = sb.split("#")
+
+ width = width.to_i
+ height = height.to_i
+ count = count.to_i
+ interval = interval.to_i
+ columns = columns.to_i
+ rows = rows.to_i
+
+ # Copy base URL object, so that we can modify it
+ url = base_url.dup
+
+ # Add the signature to the URL
+ params = url.query_params
+ params["sigh"] = sigh
+ url.query_params = params
+
+ # Replace the template parts with what we have
+ url.path = url.path.sub("$L", i).sub("$N", name)
+
+        # This value represents the maximum number of thumbnails that can fit
+        # in a single image. The last image (or the only one for short videos)
+        # may contain fewer thumbnails than that.
+ thumbnails_per_image = columns * rows
+
+        # This value represents the total number of storyboard images required
+        # to hold all of the thumbnails. It can't be less than 1.
+ images_count = (count / thumbnails_per_image).ceil.to_i
+
+ # Compute the interval when needed (in general, that's only required
+ # for the first "default" storyboard).
+ if interval == 0
+ interval = ((length_seconds / count) * 1_000).to_i
+ end
+
+ Storyboard.new(
+ url: url,
+ width: width,
+ height: height,
+ count: count,
+ interval: interval,
+ rows: rows,
+ columns: columns,
+ images_count: images_count,
+ )
+ end
+ end
+ end
+end
diff --git a/src/invidious/videos/transcript.cr b/src/invidious/videos/transcript.cr
new file mode 100644
index 00000000..4bd9f820
--- /dev/null
+++ b/src/invidious/videos/transcript.cr
@@ -0,0 +1,126 @@
+module Invidious::Videos
+  # A `Transcript` struct encapsulates the sequence of lines that together form the whole transcript for a given YouTube video.
+ # These lines can be categorized into two types: section headings and regular lines representing content from the video.
+ struct Transcript
+ # Types
+ record HeadingLine, start_ms : Time::Span, end_ms : Time::Span, line : String
+ record RegularLine, start_ms : Time::Span, end_ms : Time::Span, line : String
+ alias TranscriptLine = HeadingLine | RegularLine
+
+ property lines : Array(TranscriptLine)
+
+ property language_code : String
+ property auto_generated : Bool
+
+    # User-friendly label for the current transcript.
+ # Example: "English (auto-generated)"
+ property label : String
+
+ # Initializes a new Transcript struct with the contents and associated metadata describing it
+ def initialize(@lines : Array(TranscriptLine), @language_code : String, @auto_generated : Bool, @label : String)
+ end
+
+ # Generates a protobuf string to fetch the requested transcript from YouTube
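+    # For example, `generate_param("dQw4w9WgXcQ", "en", true)` builds the
+    # parameter for the auto-generated ("asr") English transcript of that video.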
+ def self.generate_param(video_id : String, language_code : String, auto_generated : Bool) : String
+ kind = auto_generated ? "asr" : ""
+
+ object = {
+ "1:0:string" => video_id,
+
+ "2:base64" => {
+ "1:string" => kind,
+ "2:string" => language_code,
+ "3:string" => "",
+ },
+
+ "3:varint" => 1_i64,
+ "5:string" => "engagement-panel-searchable-transcript-search-panel",
+ "6:varint" => 1_i64,
+ "7:varint" => 1_i64,
+ "8:varint" => 1_i64,
+ }
+
+ params = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return params
+ end
+
+    # Constructs a `Transcript` struct from the initial YouTube response
+ def self.from_raw(initial_data : Hash(String, JSON::Any), language_code : String, auto_generated : Bool)
+ transcript_panel = initial_data.dig("actions", 0, "updateEngagementPanelAction", "content", "transcriptRenderer",
+ "content", "transcriptSearchPanelRenderer")
+
+ segment_list = transcript_panel.dig("body", "transcriptSegmentListRenderer")
+
+ if !segment_list["initialSegments"]?
+ raise NotFoundException.new("Requested transcript does not exist")
+ end
+
+ # Extract user-friendly label for the current transcript
+
+ footer_language_menu = transcript_panel.dig?(
+ "footer", "transcriptFooterRenderer", "languageMenu", "sortFilterSubMenuRenderer", "subMenuItems"
+ )
+
+ if footer_language_menu
+ label = footer_language_menu.as_a.select(&.["selected"].as_bool)[0]["title"].as_s
+ else
+ label = language_code
+ end
+
+ # Extract transcript lines
+
+ initial_segments = segment_list["initialSegments"].as_a
+
+ lines = [] of TranscriptLine
+
+ initial_segments.each do |line|
+ if unpacked_line = line["transcriptSectionHeaderRenderer"]?
+ line_type = HeadingLine
+ else
+ unpacked_line = line["transcriptSegmentRenderer"]
+ line_type = RegularLine
+ end
+
+ start_ms = unpacked_line["startMs"].as_s.to_i.millisecond
+ end_ms = unpacked_line["endMs"].as_s.to_i.millisecond
+ text = extract_text(unpacked_line["snippet"]) || ""
+
+ lines << line_type.new(start_ms, end_ms, text)
+ end
+
+ return Transcript.new(
+ lines: lines,
+ language_code: language_code,
+ auto_generated: auto_generated,
+ label: label
+ )
+ end
+
+ # Converts transcript lines to a WebVTT file
+ #
+    # This is used within Invidious to replace subtitles,
+    # as a workaround for YouTube's rate-limited timedtext endpoint.
+ def to_vtt
+ settings_field = {
+ "Kind" => "captions",
+ "Language" => @language_code,
+ }
+
+ vtt = WebVTT.build(settings_field) do |builder|
+ @lines.each do |line|
+          # Section headers are excluded from the VTT conversion so as to
+          # match the regular captions returned from YouTube as closely as possible
+ next if line.is_a? HeadingLine
+
+ builder.cue(line.start_ms, line.end_ms, line.line)
+ end
+ end
+
+ return vtt
+ end
+ end
+end
diff --git a/src/invidious/videos/video_preferences.cr b/src/invidious/videos/video_preferences.cr
new file mode 100644
index 00000000..48177bd8
--- /dev/null
+++ b/src/invidious/videos/video_preferences.cr
@@ -0,0 +1,162 @@
+struct VideoPreferences
+ include JSON::Serializable
+
+ property annotations : Bool
+ property preload : Bool
+ property autoplay : Bool
+ property comments : Array(String)
+ property continue : Bool
+ property continue_autoplay : Bool
+ property controls : Bool
+ property listen : Bool
+ property local : Bool
+ property preferred_captions : Array(String)
+ property player_style : String
+ property quality : String
+ property quality_dash : String
+ property raw : Bool
+ property region : String?
+ property related_videos : Bool
+ property speed : Float32 | Float64
+ property video_end : Float64 | Int32
+ property video_loop : Bool
+ property extend_desc : Bool
+ property video_start : Float64 | Int32
+ property volume : Int32
+ property vr_mode : Bool
+ property save_player_pos : Bool
+end
+
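+# Merge the video-related query parameters with the user's preferences (or
+# the instance defaults). Boolean values are first normalized to 0/1 integers
+# (via `to_unsafe`) so that `||=` can distinguish "not set" (nil) from an
+# explicit false, then converted back to booleans before building the
+# VideoPreferences struct.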
+def process_video_params(query, preferences)
+ annotations = query["iv_load_policy"]?.try &.to_i?
+ preload = query["preload"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ autoplay = query["autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ comments = query["comments"]?.try &.split(",").map(&.downcase)
+ continue = query["continue"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ continue_autoplay = query["continue_autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ listen = query["listen"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ local = query["local"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ player_style = query["player_style"]?
+ preferred_captions = query["subtitles"]?.try &.split(",").map(&.downcase)
+ quality = query["quality"]?
+ quality_dash = query["quality_dash"]?
+ region = query["region"]?
+ related_videos = query["related_videos"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ speed = query["speed"]?.try &.rchop("x").to_f?
+ video_loop = query["loop"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ extend_desc = query["extend_desc"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ volume = query["volume"]?.try &.to_i?
+ vr_mode = query["vr_mode"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+ save_player_pos = query["save_player_pos"]?.try { |q| (q == "true" || q == "1").to_unsafe }
+
+ if preferences
+ # region ||= preferences.region
+ annotations ||= preferences.annotations.to_unsafe
+ preload ||= preferences.preload.to_unsafe
+ autoplay ||= preferences.autoplay.to_unsafe
+ comments ||= preferences.comments
+ continue ||= preferences.continue.to_unsafe
+ continue_autoplay ||= preferences.continue_autoplay.to_unsafe
+ listen ||= preferences.listen.to_unsafe
+ local ||= preferences.local.to_unsafe
+ player_style ||= preferences.player_style
+ preferred_captions ||= preferences.captions
+ quality ||= preferences.quality
+ quality_dash ||= preferences.quality_dash
+ related_videos ||= preferences.related_videos.to_unsafe
+ speed ||= preferences.speed
+ video_loop ||= preferences.video_loop.to_unsafe
+ extend_desc ||= preferences.extend_desc.to_unsafe
+ volume ||= preferences.volume
+ vr_mode ||= preferences.vr_mode.to_unsafe
+ save_player_pos ||= preferences.save_player_pos.to_unsafe
+ end
+
+ annotations ||= CONFIG.default_user_preferences.annotations.to_unsafe
+ preload ||= CONFIG.default_user_preferences.preload.to_unsafe
+ autoplay ||= CONFIG.default_user_preferences.autoplay.to_unsafe
+ comments ||= CONFIG.default_user_preferences.comments
+ continue ||= CONFIG.default_user_preferences.continue.to_unsafe
+ continue_autoplay ||= CONFIG.default_user_preferences.continue_autoplay.to_unsafe
+ listen ||= CONFIG.default_user_preferences.listen.to_unsafe
+ local ||= CONFIG.default_user_preferences.local.to_unsafe
+ player_style ||= CONFIG.default_user_preferences.player_style
+ preferred_captions ||= CONFIG.default_user_preferences.captions
+ quality ||= CONFIG.default_user_preferences.quality
+ quality_dash ||= CONFIG.default_user_preferences.quality_dash
+ related_videos ||= CONFIG.default_user_preferences.related_videos.to_unsafe
+ speed ||= CONFIG.default_user_preferences.speed
+ video_loop ||= CONFIG.default_user_preferences.video_loop.to_unsafe
+ extend_desc ||= CONFIG.default_user_preferences.extend_desc.to_unsafe
+ volume ||= CONFIG.default_user_preferences.volume
+ vr_mode ||= CONFIG.default_user_preferences.vr_mode.to_unsafe
+ save_player_pos ||= CONFIG.default_user_preferences.save_player_pos.to_unsafe
+
+ annotations = annotations == 1
+ preload = preload == 1
+ autoplay = autoplay == 1
+ continue = continue == 1
+ continue_autoplay = continue_autoplay == 1
+ listen = listen == 1
+ local = local == 1
+ related_videos = related_videos == 1
+ video_loop = video_loop == 1
+ extend_desc = extend_desc == 1
+ vr_mode = vr_mode == 1
+ save_player_pos = save_player_pos == 1
+
+ if CONFIG.disabled?("dash") && quality == "dash"
+ quality = "high"
+ end
+
+ if CONFIG.disabled?("local") && local
+ local = false
+ end
+
+ if start = query["t"]? || query["time_continue"]? || query["start"]?
+ video_start = decode_time(start)
+ end
+ video_start ||= 0
+
+ if query["end"]?
+ video_end = decode_time(query["end"])
+ end
+ video_end ||= -1
+
+ raw = query["raw"]?.try &.to_i?
+ raw ||= 0
+ raw = raw == 1
+
+ controls = query["controls"]?.try &.to_i?
+ controls ||= 1
+ controls = controls >= 1
+
+ params = VideoPreferences.new({
+ annotations: annotations,
+ preload: preload,
+ autoplay: autoplay,
+ comments: comments,
+ continue: continue,
+ continue_autoplay: continue_autoplay,
+ controls: controls,
+ listen: listen,
+ local: local,
+ player_style: player_style,
+ preferred_captions: preferred_captions,
+ quality: quality,
+ quality_dash: quality_dash,
+ raw: raw,
+ region: region,
+ related_videos: related_videos,
+ speed: speed,
+ video_end: video_end,
+ video_loop: video_loop,
+ extend_desc: extend_desc,
+ video_start: video_start,
+ volume: volume,
+ vr_mode: vr_mode,
+ save_player_pos: save_player_pos,
+ })
+
+ return params
+end
diff --git a/src/invidious/views/add_playlist_items.ecr b/src/invidious/views/add_playlist_items.ecr
index 09eacbc8..6aea82ae 100644
--- a/src/invidious/views/add_playlist_items.ecr
+++ b/src/invidious/views/add_playlist_items.ecr
@@ -11,7 +11,9 @@
<legend><a href="/playlist?list=<%= playlist.id %>"><%= translate(locale, "Editing playlist `x`", %|"#{HTML.escape(playlist.title)}"|) %></a></legend>
<fieldset>
- <input class="pure-input-1" type="search" name="q" <% if query %>value="<%= HTML.escape(query) %>"<% else %>placeholder="<%= translate(locale, "Search for videos") %>"<% end %>>
+ <input class="pure-input-1" type="search" name="q"
+ <% if query %>value="<%= HTML.escape(query.text) %>"<% end %>
+ placeholder="<%= translate(locale, "Search for videos") %>">
<input type="hidden" name="list" value="<%= plid %>">
</fieldset>
</form>
@@ -29,30 +31,5 @@
</script>
<script src="/js/playlist_widget.js?v=<%= ASSET_COMMIT %>"></script>
-<div class="pure-g">
- <% videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-<% if query %>
- <div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/add_playlist_items?list=<%= plid %>&q=<%= HTML.escape(query.not_nil!) %>&page=<%= page - 1 %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if count >= 20 %>
- <a href="/add_playlist_items?list=<%= plid %>&q=<%= HTML.escape(query.not_nil!) %>&page=<%= page + 1 %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
- </div>
-<% end %>
+<%= rendered "components/items_paginated" %>
diff --git a/src/invidious/views/channel.ecr b/src/invidious/views/channel.ecr
index 061d7eec..a84e44bc 100644
--- a/src/invidious/views/channel.ecr
+++ b/src/invidious/views/channel.ecr
@@ -1,108 +1,54 @@
-<% content_for "header" do %>
-<title><%= channel.author %> - Invidious</title>
-<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/channel/<%= channel.ucid %>" />
-<% end %>
+<%-
+ ucid = channel.ucid
+ author = HTML.escape(channel.author)
+ channel_profile_pic = URI.parse(channel.author_thumbnail).request_target
+
+ relative_url =
+ case selected_tab
+ when .shorts? then "/channel/#{ucid}/shorts"
+ when .streams? then "/channel/#{ucid}/streams"
+ when .playlists? then "/channel/#{ucid}/playlists"
+ when .channels? then "/channel/#{ucid}/channels"
+ when .podcasts? then "/channel/#{ucid}/podcasts"
+ when .releases? then "/channel/#{ucid}/releases"
+ else
+ "/channel/#{ucid}"
+ end
+
+ youtube_url = "https://www.youtube.com#{relative_url}"
+ redirect_url = Invidious::Frontend::Misc.redirect_url(env)
+
+ page_nav_html = IV::Frontend::Pagination.nav_ctoken(locale,
+ base_url: relative_url,
+ ctoken: next_continuation
+ )
+%>
-<% if channel.banner %>
- <div class="h-box">
- <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).request_target %>">
- </div>
-
- <div class="h-box">
- <hr>
- </div>
+<% content_for "header" do %>
+<%- if selected_tab.videos? -%>
+<meta name="description" content="<%= channel.description %>">
+<meta property="og:site_name" content="Invidious">
+<meta property="og:url" content="<%= HOST_URL %>/channel/<%= ucid %>">
+<meta property="og:title" content="<%= author %>">
+<meta property="og:image" content="<%= HOST_URL %>/ggpht<%= channel_profile_pic %>">
+<meta property="og:description" content="<%= channel.description %>">
+<meta name="twitter:card" content="summary">
+<meta name="twitter:url" content="<%= HOST_URL %>/channel/<%= ucid %>">
+<meta name="twitter:title" content="<%= author %>">
+<meta name="twitter:description" content="<%= channel.description %>">
+<meta name="twitter:image" content="<%= HOST_URL %>/ggpht<%= channel_profile_pic %>">
+<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/channel/<%= ucid %>" />
+<%- end -%>
+
+<link rel="alternate" href="<%= youtube_url %>">
+<title><%= author %> - Invidious</title>
<% end %>
-<div class="pure-g h-box">
- <div class="pure-u-2-3">
- <div class="channel-profile">
- <img src="/ggpht<%= URI.parse(channel.author_thumbnail).request_target %>">
- <span><%= channel.author %></span>
- </div>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
- </h3>
- </div>
-</div>
-
-<div class="h-box">
- <p><span style="white-space:pre-wrap"><%= channel.description_html %></span></p>
-</div>
-
-<div class="h-box">
- <% ucid = channel.ucid %>
- <% author = channel.author %>
- <% sub_count_text = number_to_short_text(channel.sub_count) %>
- <%= rendered "components/subscribe_widget" %>
-</div>
-
-<div class="pure-g h-box">
- <div class="pure-u-1-3">
- <a href="https://www.youtube.com/channel/<%= channel.ucid %>"><%= translate(locale, "View channel on YouTube") %></a>
- <% if !channel.auto_generated %>
- <div class="pure-u-1 pure-md-1-3">
- <b><%= translate(locale, "Videos") %></b>
- </div>
- <% end %>
- <div class="pure-u-1 pure-md-1-3">
- <% if channel.auto_generated %>
- <b><%= translate(locale, "Playlists") %></b>
- <% else %>
- <a href="/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-md-1-3">
- <% if channel.tabs.includes? "community" %>
- <a href="/channel/<%= channel.ucid %>/community"><%= translate(locale, "Community") %></a>
- <% end %>
- </div>
- </div>
- <div class="pure-u-1-3"></div>
- <div class="pure-u-1-3">
- <div class="pure-g" style="text-align:right">
- <% sort_options.each do |sort| %>
- <div class="pure-u-1 pure-md-1-3">
- <% if sort_by == sort %>
- <b><%= translate(locale, sort) %></b>
- <% else %>
- <a href="/channel/<%= channel.ucid %>?page=<%= page %>&sort_by=<%= sort %>">
- <%= translate(locale, sort) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- </div>
-</div>
+<%= rendered "components/channel_info" %>
<div class="h-box">
<hr>
</div>
-<div class="pure-g">
- <% items.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/channel/<%= channel.ucid %>?page=<%= page - 1 %><% if sort_by != "newest" %>&sort_by=<%= HTML.escape(sort_by) %><% end %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if count == 60 %>
- <a href="/channel/<%= channel.ucid %>?page=<%= page + 1 %><% if sort_by != "newest" %>&sort_by=<%= HTML.escape(sort_by) %><% end %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
+<%= rendered "components/items_paginated" %>
diff --git a/src/invidious/views/community.ecr b/src/invidious/views/community.ecr
index 3c4eaabb..d2a305d3 100644
--- a/src/invidious/views/community.ecr
+++ b/src/invidious/views/community.ecr
@@ -1,61 +1,21 @@
-<% content_for "header" do %>
-<title><%= channel.author %> - Invidious</title>
-<% end %>
-
-<% if channel.banner %>
- <div class="h-box">
- <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).request_target %>">
- </div>
+<%-
+ ucid = channel.ucid
+ author = HTML.escape(channel.author)
+ channel_profile_pic = URI.parse(channel.author_thumbnail).request_target
- <div class="h-box">
- <hr>
- </div>
-<% end %>
-
-<div class="pure-g h-box">
- <div class="pure-u-2-3">
- <div class="channel-profile">
- <img src="/ggpht<%= URI.parse(channel.author_thumbnail).request_target %>">
- <span><%= channel.author %></span>
- </div>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
- </h3>
- </div>
-</div>
+ relative_url = "/channel/#{ucid}/community"
+ youtube_url = "https://www.youtube.com#{relative_url}"
+ redirect_url = Invidious::Frontend::Misc.redirect_url(env)
-<div class="h-box">
- <p><span style="white-space:pre-wrap"><%= XML.parse_html(channel.description_html).xpath_node(%q(.//pre)).try &.content %></span></p>
-</div>
+ selected_tab = Invidious::Frontend::ChannelPage::TabsAvailable::Community
+-%>
-<div class="h-box">
- <% ucid = channel.ucid %>
- <% author = channel.author %>
- <% sub_count_text = number_to_short_text(channel.sub_count) %>
- <%= rendered "components/subscribe_widget" %>
-</div>
+<% content_for "header" do %>
+<link rel="alternate" href="<%= youtube_url %>">
+<title><%= author %> - Invidious</title>
+<% end %>
-<div class="pure-g h-box">
- <div class="pure-u-1-3">
- <a href="https://www.youtube.com/channel/<%= channel.ucid %>/community"><%= translate(locale, "View channel on YouTube") %></a>
- <% if !channel.auto_generated %>
- <div class="pure-u-1 pure-md-1-3">
- <a href="/channel/<%= channel.ucid %>"><%= translate(locale, "Videos") %></a>
- </div>
- <% end %>
- <div class="pure-u-1 pure-md-1-3">
- <a href="/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "Playlists") %></a>
- </div>
- <div class="pure-u-1 pure-md-1-3">
- <% if channel.tabs.includes? "community" %>
- <b><%= translate(locale, "Community") %></b>
- <% end %>
- </div>
- </div>
- <div class="pure-u-2-3"></div>
-</div>
+<%= rendered "components/channel_info" %>
<div class="h-box">
<hr>
@@ -66,15 +26,15 @@
<p><%= error_message %></p>
</div>
<% else %>
- <div class="h-box pure-g" id="comments">
- <%= template_youtube_comments(items.not_nil!, locale, thin_mode) %>
+ <div class="h-box pure-g comments" id="comments">
+ <%= IV::Frontend::Comments.template_youtube(items.not_nil!, locale, thin_mode) %>
</div>
<% end %>
<script id="community_data" type="application/json">
<%=
{
- "ucid" => channel.ucid,
+ "ucid" => ucid,
"youtube_comments_text" => HTML.escape(translate(locale, "View YouTube comments")),
"comments_text" => HTML.escape(translate(locale, "View `x` comments", "{commentCount}")),
"hide_replies_text" => HTML.escape(translate(locale, "Hide replies")),
diff --git a/src/invidious/views/components/channel_info.ecr b/src/invidious/views/components/channel_info.ecr
new file mode 100644
index 00000000..f4164f31
--- /dev/null
+++ b/src/invidious/views/components/channel_info.ecr
@@ -0,0 +1,61 @@
+<% if channel.banner %>
+ <div class="h-box">
+ <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).request_target %>" alt="" />
+ </div>
+
+ <div class="h-box">
+ <hr>
+ </div>
+<% end %>
+
+<div class="pure-g h-box flexible title">
+ <div class="pure-u-1-2 flex-left flexible">
+ <div class="channel-profile">
+ <img src="/ggpht<%= channel_profile_pic %>" alt="" />
+ <span><%= author %></span><% if !channel.verified.nil? && channel.verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %>
+ </div>
+ </div>
+
+ <div class="pure-u-1-2 flex-right flexible button-container">
+ <div class="pure-u">
+ <% sub_count_text = number_to_short_text(channel.sub_count) %>
+ <%= rendered "components/subscribe_widget" %>
+ </div>
+
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary" dir="auto" href="/feed/channel/<%= ucid %>">
+ <i class="icon ion-logo-rss"></i>&nbsp;<%= translate(locale, "generic_button_rss") %>
+ </a>
+ </div>
+ </div>
+</div>
+
+<div class="h-box">
+ <div id="descriptionWrapper"><p><span style="white-space:pre-wrap"><%= channel.description_html %></span></p></div>
+</div>
+
+<div class="pure-g h-box">
+ <div class="pure-u-1-2">
+ <div class="pure-u-1 pure-md-1-3">
+ <a href="<%= youtube_url %>"><%= translate(locale, "View channel on YouTube") %></a>
+ </div>
+ <div class="pure-u-1 pure-md-1-3">
+ <a href="<%= redirect_url %>"><%= translate(locale, "Switch Invidious Instance") %></a>
+ </div>
+
+ <%= Invidious::Frontend::ChannelPage.generate_tabs_links(locale, channel, selected_tab) %>
+ </div>
+ <div class="pure-u-1-2">
+ <div class="pure-g" style="text-align:end">
+ <% sort_options.each do |sort| %>
+ <div class="pure-u-1 pure-md-1-3">
+ <% if sort_by == sort %>
+ <b><%= translate(locale, sort) %></b>
+ <% else %>
+ <a href="<%= relative_url %>?sort_by=<%= sort %>"><%= translate(locale, sort) %></a>
+ <% end %>
+ </div>
+ <% end %>
+ </div>
+ </div>
+</div>
diff --git a/src/invidious/views/components/item.ecr b/src/invidious/views/components/item.ecr
index 9dfa047e..6d227cfc 100644
--- a/src/invidious/views/components/item.ecr
+++ b/src/invidious/views/components/item.ecr
@@ -1,167 +1,205 @@
+<%-
+ thin_mode = env.get("preferences").as(Preferences).thin_mode
+ item_watched = !item.is_a?(SearchChannel | SearchHashtag | SearchPlaylist | InvidiousPlaylist | Category) && env.get?("user").try &.as(User).watched.index(item.id) != nil
+ author_verified = item.responds_to?(:author_verified) && item.author_verified
+-%>
+
<div class="pure-u-1 pure-u-md-1-4">
<div class="h-box">
<% case item when %>
<% when SearchChannel %>
- <a style="width:100%" href="/channel/<%= item.ucid %>">
- <% if !env.get("preferences").as(Preferences).thin_mode %>
+ <% if !thin_mode %>
+ <a tabindex="-1" href="/channel/<%= item.ucid %>">
<center>
- <img style="width:56.25%" src="/ggpht<%= URI.parse(item.author_thumbnail).request_target.gsub(/=s\d+/, "=s176") %>"/>
+ <img loading="lazy" style="width:56.25%" src="/ggpht<%= URI.parse(item.author_thumbnail).request_target.gsub(/=s\d+/, "=s176") %>" alt="" />
</center>
- <% end %>
- <p><%= item.author %></p>
- </a>
- <p><%= translate(locale, "`x` subscribers", number_with_separator(item.subscriber_count)) %></p>
- <% if !item.auto_generated %><p><%= translate(locale, "`x` videos", number_with_separator(item.video_count)) %></p><% end %>
+ </a>
+ <%- else -%>
+ <div class="thumbnail-placeholder" style="width:56.25%"></div>
+ <% end %>
+
+ <div class="video-card-row flexible">
+ <div class="flex-left"><a href="/channel/<%= item.ucid %>">
+ <p class="channel-name" dir="auto"><%= HTML.escape(item.author) %>
+ <%- if author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end -%>
+ </p>
+ </a></div>
+ </div>
+
+ <% if !item.channel_handle.nil? %><p class="channel-name" dir="auto"><%= item.channel_handle %></p><% end %>
+ <p><%= translate_count(locale, "generic_subscribers_count", item.subscriber_count, NumberFormatting::Separator) %></p>
+ <% if !item.auto_generated && item.channel_handle.nil? %><p><%= translate_count(locale, "generic_videos_count", item.video_count, NumberFormatting::Separator) %></p><% end %>
<h5><%= item.description_html %></h5>
- <% when SearchPlaylist, InvidiousPlaylist %>
- <% if item.id.starts_with? "RD" %>
- <% url = "/mix?list=#{item.id}&continuation=#{URI.parse(item.thumbnail || "/vi/-----------").request_target.split("/")[2]}" %>
- <% else %>
- <% url = "/playlist?list=#{item.id}" %>
+ <% when SearchHashtag %>
+ <% if !thin_mode %>
+ <a tabindex="-1" href="<%= item.url %>">
+ <center><img style="width:56.25%" src="/hashtag.svg" alt="" /></center>
+ </a>
+ <%- else -%>
+ <div class="thumbnail-placeholder" style="width:56.25%"></div>
<% end %>
- <a style="width:100%" href="<%= url %>">
- <% if !env.get("preferences").as(Preferences).thin_mode %>
- <div class="thumbnail">
- <img class="thumbnail" src="<%= URI.parse(item.thumbnail || "/").request_target %>"/>
- <p class="length"><%= number_with_separator(item.video_count) %> videos</p>
- </div>
- <% end %>
- <p><%= item.title %></p>
- </a>
- <p>
- <b>
- <a style="width:100%" href="/channel/<%= item.ucid %>"><%= item.author %></a>
- </b>
- </p>
- <% when MixVideo %>
- <a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.rdid %>">
- <% if !env.get("preferences").as(Preferences).thin_mode %>
- <div class="thumbnail">
- <img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
- <% if item.length_seconds != 0 %>
- <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
- <% end %>
- </div>
- <% end %>
- <p><%= HTML.escape(item.title) %></p>
- </a>
- <p>
- <b>
- <a style="width:100%" href="/channel/<%= item.ucid %>"><%= item.author %></a>
- </b>
- </p>
- <% when PlaylistVideo %>
- <a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.plid %>">
- <% if !env.get("preferences").as(Preferences).thin_mode %>
- <div class="thumbnail">
- <img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
- <% if plid = env.get?("remove_playlist_items") %>
- <form data-onsubmit="return_false" action="/playlist_ajax?action_remove_video=1&set_video_id=<%= item.index %>&playlist_id=<%= plid %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
- <p class="watched">
- <a data-onclick="remove_playlist_item" data-index="<%= item.index %>" data-plid="<%= plid %>" href="javascript:void(0)">
- <button type="submit" style="all:unset">
- <i class="icon ion-md-trash"></i>
- </button>
- </a>
- </p>
- </form>
- <% end %>
+ <div class="video-card-row">
+ <div class="flex-left"><a href="<%= item.url %>"><%= HTML.escape(item.title) %></a></div>
+ </div>
- <% if item.responds_to?(:live_now) && item.live_now %>
- <p class="length"><i class="icon ion-ios-play-circle"></i> <%= translate(locale, "LIVE") %></p>
- <% elsif item.length_seconds != 0 %>
- <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
- <% end %>
- </div>
- <% end %>
- <p><a href="/watch?v=<%= item.id %>"><%= HTML.escape(item.title) %></a></p>
- </a>
- <p>
- <b>
- <a style="width:100%" href="/channel/<%= item.ucid %>"><%= item.author %></a>
- </b>
- </p>
-
- <h5 class="pure-g">
- <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
- <div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
- <% elsif Time.utc - item.published > 1.minute %>
- <div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>
- <% else %>
- <div class="pure-u-2-3"></div>
- <% end %>
+ <div class="video-card-row">
+ <%- if item.video_count != 0 -%>
+ <p><%= translate_count(locale, "generic_videos_count", item.video_count, NumberFormatting::Separator) %></p>
+ <%- end -%>
+ </div>
+
+ <div class="video-card-row">
+ <%- if item.channel_count != 0 -%>
+ <p><%= translate_count(locale, "generic_channels_count", item.channel_count, NumberFormatting::Separator) %></p>
+ <%- end -%>
+ </div>
+ <% when SearchPlaylist, InvidiousPlaylist %>
+ <%-
+ if item.id.starts_with? "RD"
+ link_url = "/mix?list=#{item.id}&continuation=#{URI.parse(item.thumbnail || "/vi/-----------").request_target.split("/")[2]}"
+ else
+ link_url = "/playlist?list=#{item.id}"
+ end
+ -%>
+
+ <div class="thumbnail">
+ <%- if !thin_mode %>
+ <a tabindex="-1" href="<%= link_url %>">
+ <img loading="lazy" class="thumbnail" src="<%= URI.parse(item.thumbnail || "/").request_target %>" alt="" />
+ </a>
+ <%- else -%>
+ <div class="thumbnail-placeholder"></div>
+ <%- end -%>
+
+ <div class="bottom-right-overlay">
+ <p class="length"><%= translate_count(locale, "generic_videos_count", item.video_count, NumberFormatting::Separator) %></p>
+ </div>
+ </div>
- <div class="pure-u-1-3" style="text-align:right">
- <%= item.responds_to?(:views) && item.views ? translate(locale, "`x` views", number_to_short_text(item.views || 0)) : "" %>
+ <div class="video-card-row">
+ <a href="<%= link_url %>"><p dir="auto"><%= HTML.escape(item.title) %></p></a>
+ </div>
+
+ <div class="video-card-row flexible">
+ <div class="flex-left">
+ <% if !item.ucid.to_s.empty? %>
+ <a href="/channel/<%= item.ucid %>">
+ <p class="channel-name" dir="auto"><%= HTML.escape(item.author) %>
+ <%- if author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end -%>
+ </p>
+ </a>
+ <% else %>
+ <p class="channel-name" dir="auto"><%= HTML.escape(item.author) %>
+ <%- if author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end -%>
+ </p>
+ <% end %>
</div>
- </h5>
+ </div>
+ <% when Category %>
<% else %>
- <% if !env.get("preferences").as(Preferences).thin_mode %>
- <a style="width:100%" href="/watch?v=<%= item.id %>">
- <div class="thumbnail">
- <img class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
- <% if env.get? "show_watched" %>
- <form data-onsubmit="return_false" action="/watch_ajax?action_mark_watched=1&id=<%= item.id %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
- <p class="watched">
- <a data-onclick="mark_watched" data-id="<%= item.id %>" href="javascript:void(0)">
- <button type="submit" style="all:unset">
- <i data-mouse="switch_classes" data-switch-classes="ion-ios-eye-off,ion-ios-eye"
- class="icon ion-ios-eye">
- </i>
- </button>
- </a>
- </p>
- </form>
- <% elsif plid = env.get? "add_playlist_items" %>
- <form data-onsubmit="return_false" action="/playlist_ajax?action_add_video=1&video_id=<%= item.id %>&playlist_id=<%= plid %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
- <p class="watched">
- <a data-onclick="add_playlist_item" data-id="<%= item.id %>" data-plid="<%= plid %>" href="javascript:void(0)">
- <button type="submit" style="all:unset">
- <i class="icon ion-md-add"></i>
- </button>
- </a>
- </p>
- </form>
- <% end %>
+ <%-
+ # `endpoint_params` is used for the "video-context-buttons" component
+ if item.is_a?(PlaylistVideo)
+ link_url = "/watch?v=#{item.id}&list=#{item.plid}&index=#{item.index}"
+ endpoint_params = "?v=#{item.id}&list=#{item.plid}"
+ elsif item.is_a?(MixVideo)
+ link_url = "/watch?v=#{item.id}&list=#{item.rdid}"
+ endpoint_params = "?v=#{item.id}&list=#{item.rdid}"
+ else
+ link_url = "/watch?v=#{item.id}"
+ endpoint_params = "?v=#{item.id}"
+ end
+ -%>
+
+ <div class="thumbnail">
+ <%- if !thin_mode -%>
+ <a tabindex="-1" href="<%= link_url %>">
+ <img loading="lazy" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg" alt="" />
- <% if item.responds_to?(:live_now) && item.live_now %>
- <p class="length"><i class="icon ion-ios-play-circle"></i> <%= translate(locale, "LIVE") %></p>
- <% elsif item.length_seconds != 0 %>
- <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
+ <% if item_watched %>
+ <div class="watched-overlay"></div>
+ <div class="watched-indicator" data-length="<%= item.length_seconds %>" data-id="<%= item.id %>"></div>
<% end %>
- </div>
- </a>
- <% end %>
- <p><a href="/watch?v=<%= item.id %>"><%= HTML.escape(item.title) %></a></p>
- <p style="display: flex;">
- <b style="flex: 1;">
- <a style="width:100%" href="/channel/<%= item.ucid %>"><%= item.author %></a>
- </b>
- <a title="<%=translate(locale, "Watch on YouTube")%>" href="https://www.youtube.com/watch?v=<%= item.id %>" style="margin-right: 5px;">
- <i class="icon ion-logo-youtube"></i>
- </a>
- <a title="<%=translate(locale, "Audio mode")%>" href="/watch?v=<%= item.id %>&amp;listen=1">
- <i class="icon ion-md-headset"></i>
- </a>
- </p>
-
- <h5 class="pure-g">
- <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
- <div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
- <% elsif Time.utc - item.published > 1.minute %>
- <div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>
- <% else %>
- <div class="pure-u-2-3"></div>
- <% end %>
+ </a>
+ <%- else -%>
+ <div class="thumbnail-placeholder"></div>
+ <%- end -%>
+
+ <div class="top-left-overlay">
+ <%- if env.get? "show_watched" -%>
+ <form data-onsubmit="return_false" action="/watch_ajax?action_mark_watched=1&id=<%= item.id %>&referer=<%= env.get("current_page") %>" method="post">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
+ <button type="submit" class="pure-button pure-button-secondary low-profile"
+ data-onclick="mark_watched" data-id="<%= item.id %>">
+ <i data-mouse="switch_classes" data-switch-classes="ion-ios-eye-off,ion-ios-eye" class="icon ion-ios-eye"></i>
+ </button>
+ </form>
+ <%- end -%>
+
+ <%- if plid_form = env.get?("add_playlist_items") -%>
+ <%- form_parameters = "action_add_video=1&video_id=#{item.id}&playlist_id=#{plid_form}&referer=#{env.get("current_page")}" -%>
+ <form data-onsubmit="return_false" action="/playlist_ajax?<%= form_parameters %>" method="post">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
+ <button type="submit" class="pure-button pure-button-secondary low-profile"
+ data-onclick="add_playlist_item" data-id="<%= item.id %>" data-plid="<%= plid_form %>"><i class="icon ion-md-add"></i></button>
+ </form>
+ <%- elsif item.is_a?(PlaylistVideo) && (plid_form = env.get?("remove_playlist_items")) -%>
+ <%- form_parameters = "action_remove_video=1&set_video_id=#{item.index}&playlist_id=#{plid_form}&referer=#{env.get("current_page")}" -%>
+ <form data-onsubmit="return_false" action="/playlist_ajax?<%= form_parameters %>" method="post">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
+ <button type="submit" class="pure-button pure-button-secondary low-profile"
+ data-onclick="remove_playlist_item" data-index="<%= item.index %>" data-plid="<%= plid_form %>"><i class="icon ion-md-trash"></i></button>
+ </form>
+ <%- end -%>
+ </div>
+
+ <div class="bottom-right-overlay">
+ <%- if item.responds_to?(:live_now) && item.live_now -%>
+ <p class="length" dir="auto"><i class="icon ion-ios-play-circle"></i>&nbsp;<%= translate(locale, "LIVE") %></p>
+ <%- elsif item.length_seconds != 0 -%>
+ <p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
+ <%- end -%>
+ </div>
+ </div>
+
+ <div class="video-card-row">
+ <a href="<%= link_url %>"><p dir="auto"><%= HTML.escape(item.title) %></p></a>
+ </div>
+
+ <div class="video-card-row flexible">
+ <div class="flex-left">
+ <% if !item.ucid.to_s.empty? %>
+ <a href="/channel/<%= item.ucid %>">
+ <p class="channel-name" dir="auto"><%= HTML.escape(item.author) %>
+ <%- if author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end -%>
+ </p>
+ </a>
+ <% else %>
+ <p class="channel-name" dir="auto"><%= HTML.escape(item.author) %>
+ <%- if author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end -%>
+ </p>
+ <% end %>
+ </div>
+
+ <%= rendered "components/video-context-buttons" %>
+ </div>
- <div class="pure-u-1-3" style="text-align:right">
- <%= item.responds_to?(:views) && item.views ? translate(locale, "`x` views", number_to_short_text(item.views || 0)) : "" %>
+ <div class="video-card-row flexible">
+ <div class="flex-left">
+ <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
+ <p class="video-data" dir="auto"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></p>
+ <% elsif item.responds_to?(:published) && (Time.utc - item.published) > 1.minute %>
+ <p class="video-data" dir="auto"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></p>
+ <% end %>
</div>
- </h5>
+
+ <% if item.responds_to?(:views) && item.views %>
+ <div class="flex-right">
+ <p class="video-data" dir="auto"><%= translate_count(locale, "generic_views_count", item.views || 0, NumberFormatting::Short) %></p>
+ </div>
+ <% end %>
+ </div>
<% end %>
</div>
</div>
diff --git a/src/invidious/views/components/items_paginated.ecr b/src/invidious/views/components/items_paginated.ecr
new file mode 100644
index 00000000..4534a0a3
--- /dev/null
+++ b/src/invidious/views/components/items_paginated.ecr
@@ -0,0 +1,11 @@
+<%= page_nav_html %>
+
+<div class="pure-g">
+ <%- items.each do |item| -%>
+ <%= rendered "components/item" %>
+ <%- end -%>
+</div>
+
+<%= page_nav_html %>
+
+<script src="/js/watched_indicator.js"></script>
diff --git a/src/invidious/views/components/player.ecr b/src/invidious/views/components/player.ecr
index cff3e60a..5c28358b 100644
--- a/src/invidious/views/components/player.ecr
+++ b/src/invidious/views/components/player.ecr
@@ -1,5 +1,6 @@
<video style="outline:none;width:100%;background-color:#000" playsinline poster="<%= thumbnail %>"
id="player" class="on-video_player video-js player-style-<%= params.player_style %>"
+ preload="<% if params.preload %>auto<% else %>none<% end %>"
<% if params.autoplay %>autoplay<% end %>
<% if params.video_loop %>loop<% end %>
<% if params.controls %>controls<% end %>>
@@ -7,31 +8,61 @@
<source src="<%= URI.parse(hlsvp).request_target %><% if params.local %>?local=true<% end %>" type="application/x-mpegURL" label="livestream">
<% else %>
<% if params.listen %>
- <% audio_streams.each_with_index do |fmt, i| %>
- <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
+      <% # Pick the highest-bitrate audio/mp4 (m4a) stream as the default
+ best_m4a_stream_index = 0
+ best_m4a_stream_bitrate = 0
+ audio_streams.each_with_index do |fmt, i|
+ bandwidth = fmt["bitrate"].as_i
+ if (fmt["mimeType"].as_s.starts_with?("audio/mp4") && bandwidth > best_m4a_stream_bitrate)
+ best_m4a_stream_bitrate = bandwidth
+ best_m4a_stream_index = i
+ end
+ end
+
+ audio_streams.each_with_index do |fmt, i|
+ src_url = "/latest_version?id=#{video.id}&itag=#{fmt["itag"]}"
+ src_url += "&local=true" if params.local
+
+ bitrate = fmt["bitrate"]
+ mimetype = HTML.escape(fmt["mimeType"].as_s)
+
+ selected = (i == best_m4a_stream_index)
+ %>
+ <source src="<%= src_url %>" type='<%= mimetype %>' label="<%= bitrate %>k" selected="<%= selected %>">
+ <% if !params.local && !CONFIG.disabled?("local") %>
+ <source src="<%= src_url %>&local=true" type='<%= mimetype %>' hidequalityoption="true">
+ <% end %>
<% end %>
- <% else %>
+ <% else %>
<% if params.quality == "dash" %>
<source src="/api/manifest/dash/id/<%= video.id %>?local=true&unique_res=1" type='application/dash+xml' label="dash">
<% end %>
- <% fmt_stream.each_with_index do |fmt, i| %>
- <% if params.quality %>
- <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["quality"] %>" selected="<%= params.quality == fmt["quality"] %>">
- <% else %>
- <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["quality"] %>" selected="<%= i == 0 ? true : false %>">
+ <%
+ fmt_stream.reject! { |f| f["itag"] == 17 }
+ fmt_stream.sort_by! {|f| params.quality == f["quality"] ? 0 : 1 }
+ fmt_stream.each_with_index do |fmt, i|
+ src_url = "/latest_version?id=#{video.id}&itag=#{fmt["itag"]}"
+ src_url += "&local=true" if params.local
+
+ quality = fmt["quality"]
+ mimetype = HTML.escape(fmt["mimeType"].as_s)
+
+ selected = params.quality ? (params.quality == quality) : (i == 0)
+ %>
+ <source src="<%= src_url %>" type="<%= mimetype %>" label="<%= quality %>" selected="<%= selected %>">
+ <% if !params.local && !CONFIG.disabled?("local") %>
+ <source src="<%= src_url %>&local=true" type="<%= mimetype %>" hidequalityoption="true">
<% end %>
<% end %>
<% end %>
<% preferred_captions.each do |caption| %>
- <track kind="captions" src="/api/v1/captions/<%= video.id %>?label=<%= caption.name.simpleText %>&hl=<%= env.get("preferences").as(Preferences).locale %>"
- label="<%= caption.name.simpleText %>">
+ <track kind="captions" src="/api/v1/captions/<%= video.id %>?label=<%= caption.name %>" label="<%= caption.name %>">
<% end %>
<% captions.each do |caption| %>
- <track kind="captions" src="/api/v1/captions/<%= video.id %>?label=<%= caption.name.simpleText %>&hl=<%= env.get("preferences").as(Preferences).locale %>"
- label="<%= caption.name.simpleText %>">
+ <track kind="captions" src="/api/v1/captions/<%= video.id %>?label=<%= caption.name %>" label="<%= caption.name %>">
<% end %>
<% end %>
</video>
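
The hunk above replaces the old "first audio stream wins" default with a pre-pass that selects the highest-bitrate `audio/mp4` stream, then emits every stream with `selected` set only on that index (plus a hidden `&local=true` variant when proxying is available). A minimal standalone sketch of that selection pass, reusing the template's `JSON::Any` accessors; the helper name and free-standing method form are illustrative only:

```crystal
require "json"

# Sketch of the default-stream selection used in the template above.
# `streams` mirrors the `audio_streams` array; not part of the codebase.
def default_audio_index(streams : Array(JSON::Any)) : Int32
  best_index = 0
  best_bitrate = 0

  streams.each_with_index do |fmt, i|
    bitrate = fmt["bitrate"].as_i
    # Prefer the highest-bitrate m4a (audio/mp4) stream, as in the template.
    if fmt["mimeType"].as_s.starts_with?("audio/mp4") && bitrate > best_bitrate
      best_bitrate = bitrate
      best_index = i
    end
  end

  best_index
end
```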
diff --git a/src/invidious/views/components/player_sources.ecr b/src/invidious/views/components/player_sources.ecr
index a99fdbca..9af3899c 100644
--- a/src/invidious/views/components/player_sources.ecr
+++ b/src/invidious/views/components/player_sources.ecr
@@ -1,17 +1,19 @@
-<link rel="stylesheet" href="/css/video-js.min.css?v=<%= ASSET_COMMIT %>">
-<link rel="stylesheet" href="/css/videojs-http-source-selector.css?v=<%= ASSET_COMMIT %>">
-<link rel="stylesheet" href="/css/videojs.markers.min.css?v=<%= ASSET_COMMIT %>">
-<link rel="stylesheet" href="/css/videojs-share.css?v=<%= ASSET_COMMIT %>">
-<link rel="stylesheet" href="/css/videojs-vtt-thumbnails.css?v=<%= ASSET_COMMIT %>">
-<link rel="stylesheet" href="/css/videojs-mobile-ui.css?v=<%= ASSET_COMMIT %>">
+<link rel="stylesheet" href="/videojs/video.js/video-js.css?v=<%= ASSET_COMMIT %>">
+<link rel="stylesheet" href="/videojs/videojs-http-source-selector/videojs-http-source-selector.css?v=<%= ASSET_COMMIT %>">
+<link rel="stylesheet" href="/videojs/videojs-markers/videojs.markers.css?v=<%= ASSET_COMMIT %>">
+<link rel="stylesheet" href="/videojs/videojs-share/videojs-share.css?v=<%= ASSET_COMMIT %>">
+<link rel="stylesheet" href="/videojs/videojs-vtt-thumbnails/videojs-vtt-thumbnails.css?v=<%= ASSET_COMMIT %>">
+<link rel="stylesheet" href="/videojs/videojs-mobile-ui/videojs-mobile-ui.css?v=<%= ASSET_COMMIT %>">
<link rel="stylesheet" href="/css/player.css?v=<%= ASSET_COMMIT %>">
-<script src="/js/video.min.js?v=<%= ASSET_COMMIT %>"></script>
-<script src="/js/videojs-mobile-ui.min.js?v=<%= ASSET_COMMIT %>"></script>
-<script src="/js/videojs-contrib-quality-levels.min.js?v=<%= ASSET_COMMIT %>"></script>
-<script src="/js/videojs-http-source-selector.min.js?v=<%= ASSET_COMMIT %>"></script>
-<script src="/js/videojs-markers.min.js?v=<%= ASSET_COMMIT %>"></script>
-<script src="/js/videojs-share.min.js?v=<%= ASSET_COMMIT %>"></script>
-<script src="/js/videojs-vtt-thumbnails.min.js?v=<%= ASSET_COMMIT %>"></script>
+
+<script src="/videojs/video.js/video.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/videojs/videojs-mobile-ui/videojs-mobile-ui.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/videojs/videojs-contrib-quality-levels/videojs-contrib-quality-levels.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/videojs/videojs-http-source-selector/videojs-http-source-selector.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/videojs/videojs-markers/videojs-markers.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/videojs/videojs-share/videojs-share.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/videojs/videojs-vtt-thumbnails/videojs-vtt-thumbnails.js?v=<%= ASSET_COMMIT %>"></script>
+
<% if params.annotations %>
<link rel="stylesheet" href="/css/videojs-youtube-annotations.min.css?v=<%= ASSET_COMMIT %>">
@@ -22,3 +24,8 @@
<link rel="stylesheet" href="/css/quality-selector.css?v=<%= ASSET_COMMIT %>">
<script src="/js/silvermine-videojs-quality-selector.min.js?v=<%= ASSET_COMMIT %>"></script>
<% end %>
+
+<% if !params.listen && params.vr_mode %>
+ <link rel="stylesheet" href="/videojs/videojs-vr/videojs-vr.css?v=<%= ASSET_COMMIT %>">
+ <script src="/videojs/videojs-vr/videojs-vr.js?v=<%= ASSET_COMMIT %>"></script>
+<% end %>
diff --git a/src/invidious/views/components/search_box.ecr b/src/invidious/views/components/search_box.ecr
new file mode 100644
index 00000000..29da2c52
--- /dev/null
+++ b/src/invidious/views/components/search_box.ecr
@@ -0,0 +1,12 @@
+<form class="pure-form" action="/search" method="get">
+ <fieldset>
+ <input type="search" id="searchbox" autocorrect="off"
+ autocapitalize="none" spellcheck="false" <% if autofocus %>autofocus<% end %>
+ name="q" placeholder="<%= translate(locale, "search") %>"
+ title="<%= translate(locale, "search") %>"
+ value="<%= env.get?("search").try {|x| HTML.escape(x.as(String)) } %>">
+ </fieldset>
+ <button type="submit" id="searchbutton" aria-label="<%= translate(locale, "search") %>">
+ <i class="icon ion-ios-search"></i>
+ </button>
+</form>
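
The new search box component expects `locale`, `env`, and an `autofocus` flag to be in scope where it is rendered; the later hunks for `search_homepage.ecr` and `template.ecr` include it exactly like this:

```ecr
<% autofocus = true %><%= rendered "components/search_box" %>
```

(`rendered` appears to be the project's compile-time partial macro, so the `autofocus` local has to be assigned in the same scope, immediately before the include.)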
diff --git a/src/invidious/views/components/subscribe_widget.ecr b/src/invidious/views/components/subscribe_widget.ecr
index ac2fbf1d..05e4e253 100644
--- a/src/invidious/views/components/subscribe_widget.ecr
+++ b/src/invidious/views/components/subscribe_widget.ecr
@@ -1,22 +1,18 @@
<% if user %>
<% if subscriptions.includes? ucid %>
- <p>
<form action="/subscription_ajax?action_remove_subscriptions=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
<button data-type="unsubscribe" id="subscribe" class="pure-button pure-button-primary">
<b><input style="all:unset" type="submit" value="<%= translate(locale, "Unsubscribe") %> | <%= sub_count_text %>"></b>
</button>
</form>
- </p>
<% else %>
- <p>
<form action="/subscription_ajax?action_create_subscription_to_channel=1&c=<%= ucid %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
<button data-type="subscribe" id="subscribe" class="pure-button pure-button-primary">
<b><input style="all:unset" type="submit" value="<%= translate(locale, "Subscribe") %> | <%= sub_count_text %>"></b>
</button>
</form>
- </p>
<% end %>
<script id="subscribe_data" type="application/json">
@@ -33,10 +29,8 @@
</script>
<script src="/js/subscribe_widget.js?v=<%= ASSET_COMMIT %>"></script>
<% else %>
- <p>
<a id="subscribe" class="pure-button pure-button-primary"
href="/login?referer=<%= env.get("current_page") %>">
<b><%= translate(locale, "Subscribe") %> | <%= sub_count_text %></b>
</a>
- </p>
<% end %>
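
A recurring change in this diff: CSRF tokens written into hidden form fields now go through `HTML.escape` instead of `URI.encode_www_form`. The token lands in an HTML attribute value, not in a URL, so attribute escaping is the appropriate transformation; URL-encoding can mangle the value the browser submits back. A small illustration with a made-up token:

```crystal
require "html"
require "uri"

token = "abc+def=="          # hypothetical token value

HTML.escape(token)           # => "abc+def==" (unchanged; safe inside an attribute)
URI.encode_www_form(token)   # => "abc%2Bdef%3D%3D" (the form would submit a different string)
```

The same substitution appears below in create_playlist, delete_playlist, edit_playlist, the template footer, and the user settings views.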
diff --git a/src/invidious/views/components/video-context-buttons.ecr b/src/invidious/views/components/video-context-buttons.ecr
new file mode 100644
index 00000000..22458a03
--- /dev/null
+++ b/src/invidious/views/components/video-context-buttons.ecr
@@ -0,0 +1,21 @@
+<div class="flex-right flexible">
+ <div class="icon-buttons">
+ <a title="<%=translate(locale, "videoinfo_watch_on_youTube")%>" rel="noreferrer noopener" href="https://www.youtube.com/watch<%=endpoint_params%>">
+ <i class="icon ion-logo-youtube"></i>
+ </a>
+ <a title="<%=translate(locale, "Audio mode")%>" href="/watch<%=endpoint_params%>&listen=1">
+ <i class="icon ion-md-headset"></i>
+ </a>
+
+ <% if env.get("preferences").as(Preferences).automatic_instance_redirect%>
+ <a title="<%=translate(locale, "Switch Invidious Instance")%>" href="/redirect?referer=%2Fwatch<%=URI.encode_www_form(endpoint_params)%>">
+ <i class="icon ion-md-jet"></i>
+ </a>
+ <% else %>
+ <a title="<%=translate(locale, "Switch Invidious Instance")%>" href="https://redirect.invidious.io/watch<%=endpoint_params%>">
+ <i class="icon ion-md-jet"></i>
+ </a>
+ <% end %>
+
+ </div>
+</div>
\ No newline at end of file
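
The instance-switch button builds its target in one of two ways: a local `/redirect?referer=…` URL when `automatic_instance_redirect` is enabled, with the watch path percent-encoded so it survives as a single query parameter, or a plain link to redirect.invidious.io otherwise. A quick illustration of the encoding step (the `endpoint_params` value is hypothetical):

```crystal
require "uri"

endpoint_params = "?v=dQw4w9WgXcQ&listen=1"   # hypothetical example

"/redirect?referer=%2Fwatch" + URI.encode_www_form(endpoint_params)
# => "/redirect?referer=%2Fwatch%3Fv%3DdQw4w9WgXcQ%26listen%3D1"
```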
diff --git a/src/invidious/views/create_playlist.ecr b/src/invidious/views/create_playlist.ecr
index 14f3673e..807244e6 100644
--- a/src/invidious/views/create_playlist.ecr
+++ b/src/invidious/views/create_playlist.ecr
@@ -30,7 +30,7 @@
</button>
</div>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</fieldset>
</form>
</div>
diff --git a/src/invidious/views/delete_playlist.ecr b/src/invidious/views/delete_playlist.ecr
index 480e36f4..cd66b963 100644
--- a/src/invidious/views/delete_playlist.ecr
+++ b/src/invidious/views/delete_playlist.ecr
@@ -19,6 +19,6 @@
</div>
</div>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</form>
</div>
diff --git a/src/invidious/views/edit_playlist.ecr b/src/invidious/views/edit_playlist.ecr
index bd8d6207..34157c67 100644
--- a/src/invidious/views/edit_playlist.ecr
+++ b/src/invidious/views/edit_playlist.ecr
@@ -1,81 +1,60 @@
+<% title = HTML.escape(playlist.title) %>
+
<% content_for "header" do %>
-<title><%= playlist.title %> - Invidious</title>
+<title><%= title %> - Invidious</title>
<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/playlist/<%= plid %>" />
<% end %>
<form class="pure-form" action="/edit_playlist?list=<%= plid %>" method="post">
- <div class="pure-g h-box">
- <div class="pure-u-2-3">
- <h3><input class="pure-input-1" maxlength="150" name="title" type="text" value="<%= playlist.title %>"></h3>
+ <div class="h-box flexible">
+ <div class="flex-right button-container">
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/playlist?list=<%= plid %>">
+ <i class="icon ion-md-close"></i>&nbsp;<%= translate(locale, "generic_button_cancel") %>
+ </a>
+ </div>
+ <div class="pure-u">
+ <button class="pure-button pure-button-secondary low-profile" dir="auto" type="submit">
+ <i class="icon ion-md-save"></i>&nbsp;<%= translate(locale, "generic_button_save") %>
+ </button>
+ </div>
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/delete_playlist?list=<%= plid %>">
+ <i class="icon ion-md-trash"></i>&nbsp;<%= translate(locale, "generic_button_delete") %>
+ </a>
+ </div>
+ </div>
+ </div>
+
+ <div class="h-box flexible title">
+ <div>
+ <h3><input class="pure-input-1" maxlength="150" name="title" type="text" value="<%= title %>"></h3>
+ </div>
+ </div>
+
+ <div class="h-box">
+ <div class="pure-u-1-1">
<b>
- <%= playlist.author %> |
- <%= translate(locale, "`x` videos", "#{playlist.video_count}") %> |
- <%= translate(locale, "Updated `x` ago", recode_date(playlist.updated, locale)) %> |
- <i class="icon <%= {"ion-md-globe", "ion-ios-unlock", "ion-ios-lock"}[playlist.privacy.value] %>"></i>
- <select name="privacy">
- <% {"Public", "Unlisted", "Private"}.each do |option| %>
- <option value="<%= option %>" <% if option == playlist.privacy.to_s %>selected<% end %>><%= translate(locale, option) %></option>
- <% end %>
- </select>
+ <%= HTML.escape(playlist.author) %> |
+ <%= translate_count(locale, "generic_videos_count", playlist.video_count) %> |
</b>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <div class="pure-g user-field">
- <div class="pure-u-1-3">
- <a href="javascript:void(0)">
- <button type="submit" style="all:unset">
- <i class="icon ion-md-save"></i>
- </button>
- </a>
- </div>
- <div class="pure-u-1-3"><a href="/delete_playlist?list=<%= plid %>"><i class="icon ion-md-trash"></i></a></div>
- <div class="pure-u-1-3"><a href="/feed/playlist/<%= plid %>"><i class="icon ion-logo-rss"></i></a></div>
- </div>
- </h3>
+ <select name="privacy">
+ <%- {"Public", "Unlisted", "Private"}.each do |option| -%>
+ <option value="<%= option %>" <% if option == playlist.privacy.to_s %>selected<% end %>><%= translate(locale, option) %></option>
+ <%- end -%>
+ </select>
</div>
</div>
<div class="h-box">
<textarea maxlength="5000" name="description" style="margin-top:10px;max-width:100%;height:20vh" class="pure-input-1"><%= playlist.description %></textarea>
</div>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</form>
-<% if playlist.is_a?(InvidiousPlaylist) && playlist.author == user.try &.email %>
-<div class="h-box" style="text-align:right">
- <h3>
- <a href="/add_playlist_items?list=<%= plid %>"><i class="icon ion-md-add"></i></a>
- </h3>
-</div>
-<% end %>
-
<div class="h-box">
<hr>
</div>
-<div class="pure-g">
- <% videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if videos.size == 100 %>
- <a href="/playlist?list=<%= playlist.id %>&page=<%= page + 1 %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
+<%= rendered "components/items_paginated" %>
diff --git a/src/invidious/views/embed.ecr b/src/invidious/views/embed.ecr
index dbb86009..1bf5cc3e 100644
--- a/src/invidious/views/embed.ecr
+++ b/src/invidious/views/embed.ecr
@@ -6,11 +6,12 @@
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="thumbnail" content="<%= thumbnail %>">
<%= rendered "components/player_sources" %>
- <link rel="stylesheet" href="/css/videojs-overlay.css?v=<%= ASSET_COMMIT %>">
- <script src="/js/videojs-overlay.min.js?v=<%= ASSET_COMMIT %>"></script>
+ <link rel="stylesheet" href="/videojs/videojs-overlay/videojs-overlay.css?v=<%= ASSET_COMMIT %>">
+ <script src="/videojs/videojs-overlay/videojs-overlay.js?v=<%= ASSET_COMMIT %>"></script>
<link rel="stylesheet" href="/css/default.css?v=<%= ASSET_COMMIT %>">
<link rel="stylesheet" href="/css/embed.css?v=<%= ASSET_COMMIT %>">
<title><%= HTML.escape(video.title) %> - Invidious</title>
+ <script src="/js/_helpers.js?v=<%= ASSET_COMMIT %>"></script>
</head>
<body class="dark-theme">
@@ -24,7 +25,8 @@
"video_series" => video_series,
"params" => params,
"preferences" => preferences,
- "premiere_timestamp" => video.premiere_timestamp.try &.to_unix
+ "premiere_timestamp" => video.premiere_timestamp.try &.to_unix,
+ "local_disabled" => CONFIG.disabled?("local")
}.to_pretty_json
%>
</script>
diff --git a/src/invidious/views/error.ecr b/src/invidious/views/error.ecr
index d0752e5b..04eb74d5 100644
--- a/src/invidious/views/error.ecr
+++ b/src/invidious/views/error.ecr
@@ -4,4 +4,5 @@
<div class="h-box">
<%= error_message %>
+ <%= next_steps %>
</div>
diff --git a/src/invidious/views/feeds/history.ecr b/src/invidious/views/feeds/history.ecr
new file mode 100644
index 00000000..bda4e1f3
--- /dev/null
+++ b/src/invidious/views/feeds/history.ecr
@@ -0,0 +1,59 @@
+<% content_for "header" do %>
+<title><%= translate(locale, "History") %> - Invidious</title>
+<% end %>
+
+<div class="pure-g h-box">
+ <div class="pure-u-1-3">
+ <h3><%= translate_count(locale, "generic_videos_count", user.watched.size, NumberFormatting::HtmlSpan) %></h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:center">
+ <a href="/feed/subscriptions"><%= translate_count(locale, "generic_subscriptions_count", user.subscriptions.size, NumberFormatting::HtmlSpan) %></a>
+ </h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
+ <a href="/clear_watch_history"><%= translate(locale, "Clear watch history") %></a>
+ </h3>
+ </div>
+</div>
+
+<script id="watched_data" type="application/json">
+<%=
+{
+ "csrf_token" => URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "")
+}.to_pretty_json
+%>
+</script>
+<script src="/js/watched_widget.js"></script>
+
+<div class="pure-g">
+ <% watched.each do |item| %>
+ <div class="pure-u-1 pure-u-md-1-4">
+ <div class="h-box">
+ <div class="thumbnail">
+ <a style="width:100%" href="/watch?v=<%= item %>">
+ <img class="thumbnail" src="/vi/<%= item %>/mqdefault.jpg" alt="" />
+ </a>
+
+ <div class="top-left-overlay"><div class="watched">
+ <form data-onsubmit="return_false" action="/watch_ajax?action_mark_unwatched=1&id=<%= item %>&referer=<%= env.get("current_page") %>" method="post">
+ <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
+ <button type="submit" class="pure-button pure-button-secondary low-profile"
+ data-onclick="mark_unwatched" data-id="<%= item %>"><i class="icon ion-md-trash"></i></button>
+ </form>
+ </div></div>
+ </div>
+ <p></p>
+ </div>
+ </div>
+ <% end %>
+</div>
+
+<%=
+ IV::Frontend::Pagination.nav_numeric(locale,
+ base_url: base_url,
+ current_page: page,
+ show_next: (watched.size >= max_results)
+ )
+%>
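
The rewritten history feed hands paging over to `IV::Frontend::Pagination.nav_numeric`, passing a `show_next` flag computed from whether the current page came back full (`watched.size >= max_results`); the subscriptions feed below uses the analogous `(videos.size + notifications.size) == max_results` check. The helper itself is not part of this diff; a rough sketch of a numeric pager in that spirit, with hypothetical markup and the assumption that `base_url` carries no query string yet:

```crystal
# Illustrative only: emit previous/current/next links, hiding "next"
# when the caller signals the current page was the last full one.
def nav_numeric_sketch(base_url : String, current_page : Int32, show_next : Bool) : String
  String.build do |str|
    str << %(<div class="pure-g h-box">)
    str << %(<a href="#{base_url}?page=#{current_page - 1}">&laquo;</a>) if current_page > 1
    str << %(<span> #{current_page} </span>)
    str << %(<a href="#{base_url}?page=#{current_page + 1}">&raquo;</a>) if show_next
    str << %(</div>)
  end
end
```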
diff --git a/src/invidious/views/feeds/playlists.ecr b/src/invidious/views/feeds/playlists.ecr
new file mode 100644
index 00000000..2a4b6edd
--- /dev/null
+++ b/src/invidious/views/feeds/playlists.ecr
@@ -0,0 +1,43 @@
+<% content_for "header" do %>
+<title><%= translate(locale, "Playlists") %> - Invidious</title>
+<% end %>
+
+<%= rendered "components/feed_menu" %>
+
+<div class="pure-g h-box">
+ <div class="pure-u-1-3">
+ <h3><%= translate(locale, "user_created_playlists", %(<span id="count">#{items_created.size}</span>)) %></h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:center">
+ <a href="/create_playlist?referer=<%= URI.encode_www_form("/feed/playlists") %>"><%= translate(locale, "Create playlist") %></a>
+ </h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
+ <a href="/data_control?referer=<%= URI.encode_www_form("/feed/playlists") %>">
+ <%= translate(locale, "Import/export") %>
+ </a>
+ </h3>
+ </div>
+</div>
+
+<div class="pure-g">
+<% items_created.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
+</div>
+
+<div class="pure-g h-box">
+ <div class="pure-u-1">
+ <h3><%= translate(locale, "user_saved_playlists", %(<span id="count">#{items_saved.size}</span>)) %></h3>
+ </div>
+</div>
+
+<div class="pure-g">
+<% items_saved.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
+</div>
+
+<script src="/js/watched_indicator.js"></script>
diff --git a/src/invidious/views/popular.ecr b/src/invidious/views/feeds/popular.ecr
index 62abb12a..5fbe539c 100644
--- a/src/invidious/views/popular.ecr
+++ b/src/invidious/views/feeds/popular.ecr
@@ -12,9 +12,9 @@
<%= rendered "components/feed_menu" %>
<div class="pure-g">
- <% popular_videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
+<% popular_videos.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
</div>
+
+<script src="/js/watched_indicator.js"></script>
diff --git a/src/invidious/views/feeds/subscriptions.ecr b/src/invidious/views/feeds/subscriptions.ecr
new file mode 100644
index 00000000..c36bd00f
--- /dev/null
+++ b/src/invidious/views/feeds/subscriptions.ecr
@@ -0,0 +1,74 @@
+<% content_for "header" do %>
+<title><%= translate(locale, "Subscriptions") %> - Invidious</title>
+<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/private?token=<%= token %>" />
+<% end %>
+
+<%= rendered "components/feed_menu" %>
+
+<div class="pure-g h-box">
+ <div class="pure-u-1-3">
+ <h3>
+ <a href="/subscription_manager"><%= translate(locale, "Manage subscriptions") %></a>
+ </h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:center">
+ <a href="/feed/history"><%= translate(locale, "Watch history") %></a>
+ </h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
+ <a href="/feed/private?token=<%= token %>"><i class="icon ion-logo-rss"></i></a>
+ </h3>
+ </div>
+</div>
+
+<% if CONFIG.enable_user_notifications %>
+
+<center>
+ <%= translate_count(locale, "subscriptions_unseen_notifs_count", notifications.size) %>
+</center>
+
+<% if !notifications.empty? %>
+ <div class="h-box">
+ <hr>
+ </div>
+<% end %>
+
+<div class="pure-g">
+<% notifications.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
+</div>
+
+<% end %>
+
+<div class="h-box">
+ <hr>
+</div>
+
+<script id="watched_data" type="application/json">
+<%=
+{
+ "csrf_token" => URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "")
+}.to_pretty_json
+%>
+</script>
+<script src="/js/watched_widget.js"></script>
+
+
+<div class="pure-g">
+<% videos.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
+</div>
+
+<script src="/js/watched_indicator.js"></script>
+
+<%=
+ IV::Frontend::Pagination.nav_numeric(locale,
+ base_url: base_url,
+ current_page: page,
+ show_next: ((videos.size + notifications.size) == max_results)
+ )
+%>
diff --git a/src/invidious/views/trending.ecr b/src/invidious/views/feeds/trending.ecr
index 3ec62555..7dc416c6 100644
--- a/src/invidious/views/trending.ecr
+++ b/src/invidious/views/feeds/trending.ecr
@@ -41,9 +41,9 @@
</div>
<div class="pure-g">
- <% trending.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
+<% trending.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
</div>
+
+<script src="/js/watched_indicator.js"></script>
diff --git a/src/invidious/views/hashtag.ecr b/src/invidious/views/hashtag.ecr
new file mode 100644
index 00000000..2000337e
--- /dev/null
+++ b/src/invidious/views/hashtag.ecr
@@ -0,0 +1,8 @@
+<% content_for "header" do %>
+<title><%= HTML.escape(hashtag) %> - Invidious</title>
+<% end %>
+
+<hr/>
+
+
+<%= rendered "components/items_paginated" %>
diff --git a/src/invidious/views/history.ecr b/src/invidious/views/history.ecr
deleted file mode 100644
index fe8c70b9..00000000
--- a/src/invidious/views/history.ecr
+++ /dev/null
@@ -1,75 +0,0 @@
-<% content_for "header" do %>
-<title><%= translate(locale, "History") %> - Invidious</title>
-<% end %>
-
-<div class="pure-g h-box">
- <div class="pure-u-1-3">
- <h3><%= translate(locale, "`x` videos", %(<span id="count">#{user.watched.size}</span>)) %></h3>
- </div>
- <div class="pure-u-1-3" style="text-align:center">
- <h3>
- <a href="/feed/subscriptions"><%= translate(locale, "`x` subscriptions", %(<span id="count">#{user.subscriptions.size}</span>)) %></a>
- </h3>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/clear_watch_history"><%= translate(locale, "Clear watch history") %></a>
- </h3>
- </div>
-</div>
-
-<script id="watched_data" type="application/json">
-<%=
-{
- "csrf_token" => URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "")
-}.to_pretty_json
-%>
-</script>
-<script src="/js/watched_widget.js"></script>
-
-<div class="pure-g">
- <% watched.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <div class="pure-u-1 pure-u-md-1-4">
- <div class="h-box">
- <a style="width:100%" href="/watch?v=<%= item %>">
- <% if !env.get("preferences").as(Preferences).thin_mode %>
- <div class="thumbnail">
- <img class="thumbnail" src="/vi/<%= item %>/mqdefault.jpg"/>
- <form data-onsubmit="return_false" action="/watch_ajax?action_mark_unwatched=1&id=<%= item %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
- <p class="watched">
- <a data-onclick="mark_unwatched" data-id="<%= item %>" href="javascript:void(0)">
- <button type="submit" style="all:unset">
- <i class="icon ion-md-trash"></i>
- </button>
- </a>
- </p>
- </form>
- </div>
- <p></p>
- <% end %>
- </a>
- </div>
- </div>
- <% end %>
- <% end %>
-</div>
-
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/feed/history?page=<%= page - 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if watched.size >= max_results %>
- <a href="/feed/history?page=<%= page + 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
diff --git a/src/invidious/views/licenses.ecr b/src/invidious/views/licenses.ecr
index c2ada992..667cfa37 100644
--- a/src/invidious/views/licenses.ecr
+++ b/src/invidious/views/licenses.ecr
@@ -11,6 +11,34 @@
<table id="jslicense-labels1">
<tr>
<td>
+ <a href="/js/_helpers.js?v=<%= ASSET_COMMIT %>">_helpers.js</a>
+ </td>
+
+ <td>
+ <a href="https://www.gnu.org/licenses/agpl-3.0.html">AGPL-3.0</a>
+ </td>
+
+ <td>
+ <a href="/js/_helpers.js?v=<%= ASSET_COMMIT %>"><%= translate(locale, "source") %></a>
+ </td>
+ </tr>
+
+ <tr>
+ <td>
+ <a href="/js/handlers.js?v=<%= ASSET_COMMIT %>">handlers.js</a>
+ </td>
+
+ <td>
+ <a href="https://www.gnu.org/licenses/agpl-3.0.html">AGPL-3.0</a>
+ </td>
+
+ <td>
+ <a href="/js/handlers.js?v=<%= ASSET_COMMIT %>"><%= translate(locale, "source") %></a>
+ </td>
+ </tr>
+
+ <tr>
+ <td>
<a href="/js/community.js?v=<%= ASSET_COMMIT %>">community.js</a>
</td>
@@ -75,7 +103,7 @@
</td>
<td>
- <a href="https://github.com/omarroth/videojs-quality-selector"><%= translate(locale, "source") %></a>
+ <a href="https://github.com/iv-org/videojs-quality-selector"><%= translate(locale, "source") %></a>
</td>
</tr>
@@ -123,7 +151,7 @@
<tr>
<td>
- <a href="/js/videojs-contrib-quality-levels.min.js?v=<%= ASSET_COMMIT %>">videojs-contrib-quality-levels.min.js</a>
+ <a href="/videojs/videojs-contrib-quality-levels/videojs-contrib-quality-levels.js?v=<%= ASSET_COMMIT %>">videojs-contrib-quality-levels.js</a>
</td>
<td>
@@ -137,7 +165,7 @@
<tr>
<td>
- <a href="/js/videojs-http-source-selector.min.js?v=<%= ASSET_COMMIT %>">videojs-http-source-selector.min.js</a>
+ <a href="/videojs/videojs-http-source-selector/videojs-http-source-selector.js?v=<%= ASSET_COMMIT %>">videojs-http-source-selector.js</a>
</td>
<td>
@@ -151,11 +179,11 @@
<tr>
<td>
- <a href="/js/videojs-mobile-ui.min.js?v=<%= ASSET_COMMIT %>">videojs-mobile-ui.min.js</a>
+ <a href="/videojs/videojs-mobile-ui/videojs-mobile-ui.js?v=<%= ASSET_COMMIT %>">videojs-mobile-ui.js</a>
</td>
<td>
- <a href="https://choosealicense.com/licenses/mit/">MIT</a>
+ <a href="https://choosealicense.com/licenses/mit/">Expat</a>
</td>
<td>
@@ -165,7 +193,7 @@
<tr>
<td>
- <a href="/js/videojs-markers.min.js?v=<%= ASSET_COMMIT %>">videojs-markers.min.js</a>
+ <a href="/videojs/videojs-markers/videojs-markers.js?v=<%= ASSET_COMMIT %>">videojs-markers.js</a>
</td>
<td>
@@ -179,7 +207,7 @@
<tr>
<td>
- <a href="/js/videojs-overlay.min.js?v=<%= ASSET_COMMIT %>">videojs-overlay.min.js</a>
+ <a href="/videojs/videojs-overlay/videojs-overlay.js?v=<%= ASSET_COMMIT %>">videojs-overlay.js</a>
</td>
<td>
@@ -193,7 +221,7 @@
<tr>
<td>
- <a href="/js/videojs-share.min.js?v=<%= ASSET_COMMIT %>">videojs-share.min.js</a>
+ <a href="/videojs/videojs-share/videojs-share.js?v=<%= ASSET_COMMIT %>">videojs-share.js</a>
</td>
<td>
@@ -207,7 +235,7 @@
<tr>
<td>
- <a href="/js/videojs-vtt-thumbnails.min.js?v=<%= ASSET_COMMIT %>">videojs-vtt-thumbnails.min.js</a>
+ <a href="/videojs/videojs-vtt-thumbnails/videojs-vtt-thumbnails.js?v=<%= ASSET_COMMIT %>">videojs-vtt-thumbnails.js</a>
</td>
<td>
@@ -215,7 +243,7 @@
</td>
<td>
- <a href="https://github.com/omarroth/videojs-vtt-thumbnails"><%= translate(locale, "source") %></a>
+ <a href="https://github.com/chrisboustead/videojs-vtt-thumbnails"><%= translate(locale, "source") %></a>
</td>
</tr>
@@ -235,7 +263,21 @@
<tr>
<td>
- <a href="/js/video.min.js?v=<%= ASSET_COMMIT %>">video.min.js</a>
+ <a href="/videojs/videojs-vr/videojs-vr.js?v=<%= ASSET_COMMIT %>">videojs-vr.js</a>
+ </td>
+
+ <td>
+ <a href="https://choosealicense.com/licenses/mit">Expat</a>
+ </td>
+
+ <td>
+ <a href="https://github.com/videojs/videojs-vr"><%= translate(locale, "source") %></a>
+ </td>
+ </tr>
+
+ <tr>
+ <td>
+ <a href="/videojs/video.js/video.js?v=<%= ASSET_COMMIT %>">video.js</a>
</td>
<td>
diff --git a/src/invidious/views/mix.ecr b/src/invidious/views/mix.ecr
index e9c0dcbc..e55b00f8 100644
--- a/src/invidious/views/mix.ecr
+++ b/src/invidious/views/mix.ecr
@@ -1,22 +1,20 @@
<% content_for "header" do %>
-<title><%= mix.title %> - Invidious</title>
+<title><%= HTML.escape(mix.title) %> - Invidious</title>
<% end %>
<div class="pure-g h-box">
<div class="pure-u-2-3">
- <h3><%= mix.title %></h3>
+ <h3><%= HTML.escape(mix.title) %></h3>
</div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
<a href="/feed/playlist/<%= mix.id %>"><i class="icon ion-logo-rss"></i></a>
</h3>
</div>
</div>
<div class="pure-g">
- <% mix.videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
+<% mix.videos.each do |item| %>
+ <%= rendered "components/item" %>
+<% end %>
</div>
diff --git a/src/invidious/views/playlist.ecr b/src/invidious/views/playlist.ecr
index 91156028..c27ddba6 100644
--- a/src/invidious/views/playlist.ecr
+++ b/src/invidious/views/playlist.ecr
@@ -1,19 +1,63 @@
+<% title = HTML.escape(playlist.title) %>
+<% author = HTML.escape(playlist.author) %>
+
<% content_for "header" do %>
-<title><%= playlist.title %> - Invidious</title>
+<title><%= title %> - Invidious</title>
<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/playlist/<%= plid %>" />
<% end %>
-<div class="pure-g h-box">
- <div class="pure-u-2-3">
- <h3><%= playlist.title %></h3>
+<div class="h-box flexible title">
+ <div class="flex-left"><h3><%= title %></h3></div>
+
+ <div class="flex-right button-container">
+ <%- if playlist.is_a?(InvidiousPlaylist) && playlist.author == user.try &.email -%>
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/add_playlist_items?list=<%= plid %>">
+ <i class="icon ion-md-add"></i>&nbsp;<%= translate(locale, "playlist_button_add_items") %>
+ </a>
+ </div>
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/edit_playlist?list=<%= plid %>">
+ <i class="icon ion-md-create"></i>&nbsp;<%= translate(locale, "generic_button_edit") %>
+ </a>
+ </div>
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/delete_playlist?list=<%= plid %>">
+ <i class="icon ion-md-trash"></i>&nbsp;<%= translate(locale, "generic_button_delete") %>
+ </a>
+ </div>
+ <%- else -%>
+ <div class="pure-u">
+ <%- if IV::Database::Playlists.exists?(playlist.id) -%>
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/subscribe_playlist?list=<%= plid %>">
+ <i class="icon ion-md-add"></i>&nbsp;<%= translate(locale, "Subscribe") %>
+ </a>
+ <%- else -%>
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/delete_playlist?list=<%= plid %>">
+ <i class="icon ion-md-trash"></i>&nbsp;<%= translate(locale, "Unsubscribe") %>
+ </a>
+ <%- end -%>
+ </div>
+ <%- end -%>
+
+ <div class="pure-u">
+ <a class="pure-button pure-button-secondary low-profile" dir="auto" href="/feed/playlist/<%= plid %>">
+ <i class="icon ion-logo-rss"></i>&nbsp;<%= translate(locale, "generic_button_rss") %>
+ </a>
+ </div>
+ </div>
+</div>
+
+<div class="h-box">
+ <div class="pure-u-1-1">
<% if playlist.is_a? InvidiousPlaylist %>
<b>
<% if playlist.author == user.try &.email %>
- <a href="/view_all_playlists"><%= playlist.author %></a> |
+ <a href="/feed/playlists"><%= author %></a> |
<% else %>
- <%= playlist.author %> |
+ <%= author %> |
<% end %>
- <%= translate(locale, "`x` videos", "#{playlist.video_count}") %> |
+ <%= translate_count(locale, "generic_videos_count", playlist.video_count) %> |
<%= translate(locale, "Updated `x` ago", recode_date(playlist.updated, locale)) %> |
<% case playlist.as(InvidiousPlaylist).privacy when %>
<% when PlaylistPrivacy::Public %>
@@ -26,50 +70,42 @@
</b>
<% else %>
<b>
- <a href="/channel/<%= playlist.ucid %>"><%= playlist.author %></a> |
- <%= translate(locale, "`x` videos", "#{playlist.video_count}") %> |
+ <% if !author.empty? %>
+ <a href="/channel/<%= playlist.ucid %>"><%= author %></a> |
+ <% elsif !playlist.subtitle.nil? %>
+ <% subtitle = playlist.subtitle || "" %>
+ <span><%= HTML.escape(subtitle[0..subtitle.rindex(" • ") || subtitle.size]) %></span> |
+ <% end %>
+ <%= translate_count(locale, "generic_videos_count", playlist.video_count) %> |
<%= translate(locale, "Updated `x` ago", recode_date(playlist.updated, locale)) %>
</b>
<% end %>
+
<% if !playlist.is_a? InvidiousPlaylist %>
<div class="pure-u-2-3">
- <a href="https://www.youtube.com/playlist?list=<%= playlist.id %>">
+ <a rel="noreferrer noopener" href="https://www.youtube.com/playlist?list=<%= playlist.id %>">
<%= translate(locale, "View playlist on YouTube") %>
</a>
- </div>
- <% end %>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <div class="pure-g user-field">
- <% if playlist.is_a?(InvidiousPlaylist) && playlist.author == user.try &.email %>
- <div class="pure-u-1-3"><a href="/edit_playlist?list=<%= plid %>"><i class="icon ion-md-create"></i></a></div>
- <div class="pure-u-1-3"><a href="/delete_playlist?list=<%= plid %>"><i class="icon ion-md-trash"></i></a></div>
- <% else %>
- <% if PG_DB.query_one?("SELECT id FROM playlists WHERE id = $1", playlist.id, as: String).nil? %>
- <div class="pure-u-1-3"><a href="/subscribe_playlist?list=<%= plid %>"><i class="icon ion-md-add"></i></a></div>
+ <span> | </span>
+
+ <% if env.get("preferences").as(Preferences).automatic_instance_redirect%>
+ <a href="/redirect?referer=<%= env.get?("current_page") %>">
+ <%= translate(locale, "Switch Invidious Instance") %>
+ </a>
<% else %>
- <div class="pure-u-1-3"><a href="/delete_playlist?list=<%= plid %>"><i class="icon ion-md-trash"></i></a></div>
+ <a href="https://redirect.invidious.io/playlist?list=<%= playlist.id %>">
+ <%= translate(locale, "Switch Invidious Instance") %>
+ </a>
<% end %>
- <% end %>
- <div class="pure-u-1-3"><a href="/feed/playlist/<%= plid %>"><i class="icon ion-logo-rss"></i></a></div>
</div>
- </h3>
+ <% end %>
</div>
</div>
<div class="h-box">
- <p><%= playlist.description_html %></p>
+ <div id="descriptionWrapper"><%= playlist.description_html %></div>
</div>
-<% if playlist.is_a?(InvidiousPlaylist) && playlist.author == user.try &.email %>
-<div class="h-box" style="text-align:right">
- <h3>
- <a href="/add_playlist_items?list=<%= plid %>"><i class="icon ion-md-add"></i></a>
- </h3>
-</div>
-<% end %>
-
<div class="h-box">
<hr>
</div>
@@ -85,28 +121,5 @@
<script src="/js/playlist_widget.js?v=<%= ASSET_COMMIT %>"></script>
<% end %>
-<div class="pure-g">
- <% videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/playlist?list=<%= playlist.id %>&page=<%= page - 1 %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if page_count != 1 && page < page_count %>
- <a href="/playlist?list=<%= playlist.id %>&page=<%= page + 1 %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
+<%= rendered "components/items_paginated" %>
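
The playlist view no longer queries `PG_DB` inline to decide between the Subscribe and Unsubscribe buttons; it asks `IV::Database::Playlists.exists?` instead. Only the call site appears in this diff; a plausible shape for such a helper, inferred from the raw query it replaces (illustrative, not the actual implementation):

```crystal
module Invidious::Database::Playlists
  extend self

  # Returns whether a playlist row with this id exists, replacing the
  # PG_DB.query_one?(...).nil? check the old template ran inline.
  def exists?(id : String) : Bool
    !PG_DB.query_one?("SELECT id FROM playlists WHERE id = $1", id, as: String).nil?
  end
end
```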
diff --git a/src/invidious/views/playlists.ecr b/src/invidious/views/playlists.ecr
deleted file mode 100644
index 44bdb94d..00000000
--- a/src/invidious/views/playlists.ecr
+++ /dev/null
@@ -1,98 +0,0 @@
-<% content_for "header" do %>
-<title><%= channel.author %> - Invidious</title>
-<% end %>
-
-<% if channel.banner %>
- <div class="h-box">
- <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).request_target %>">
- </div>
-
- <div class="h-box">
- <hr>
- </div>
-<% end %>
-
-<div class="pure-g h-box">
- <div class="pure-u-2-3">
- <div class="channel-profile">
- <img src="/ggpht<%= URI.parse(channel.author_thumbnail).request_target %>">
- <span><%= channel.author %></span>
- </div>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/feed/channel/<%= channel.ucid %>"><i class="icon ion-logo-rss"></i></a>
- </h3>
- </div>
-</div>
-
-<div class="h-box">
- <p><span style="white-space:pre-wrap"><%= XML.parse_html(channel.description_html).xpath_node(%q(.//pre)).try &.content if !channel.description_html.empty? %></span></p>
-</div>
-
-<div class="h-box">
- <% ucid = channel.ucid %>
- <% author = channel.author %>
- <% sub_count_text = number_to_short_text(channel.sub_count) %>
- <%= rendered "components/subscribe_widget" %>
-</div>
-
-<div class="pure-g h-box">
- <div class="pure-g pure-u-1-3">
- <div class="pure-u-1 pure-md-1-3">
- <a href="https://www.youtube.com/channel/<%= channel.ucid %>/playlists"><%= translate(locale, "View channel on YouTube") %></a>
- </div>
- <div class="pure-u-1 pure-md-1-3">
- <a href="/channel/<%= channel.ucid %>"><%= translate(locale, "Videos") %></a>
- </div>
- <div class="pure-u-1 pure-md-1-3">
- <% if !channel.auto_generated %>
- <b><%= translate(locale, "Playlists") %></b>
- <% end %>
- </div>
- <div class="pure-u-1 pure-md-1-3">
- <% if channel.tabs.includes? "community" %>
- <a href="/channel/<%= channel.ucid %>/community"><%= translate(locale, "Community") %></a>
- <% end %>
- </div>
- </div>
- <div class="pure-u-1-3"></div>
- <div class="pure-u-1-3">
- <div class="pure-g" style="text-align:right">
- <% {"last", "oldest", "newest"}.each do |sort| %>
- <div class="pure-u-1 pure-md-1-3">
- <% if sort_by == sort %>
- <b><%= translate(locale, sort) %></b>
- <% else %>
- <a href="/channel/<%= channel.ucid %>/playlists?sort_by=<%= sort %>">
- <%= translate(locale, sort) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- </div>
-</div>
-
-<div class="h-box">
- <hr>
-</div>
-
-<div class="pure-g">
- <% items.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-md-4-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if continuation %>
- <a href="/channel/<%= channel.ucid %>/playlists?continuation=<%= continuation %><% if sort_by != "last" %>&sort_by=<%= HTML.escape(sort_by) %><% end %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
diff --git a/src/invidious/views/post.ecr b/src/invidious/views/post.ecr
new file mode 100644
index 00000000..fb03a44c
--- /dev/null
+++ b/src/invidious/views/post.ecr
@@ -0,0 +1,48 @@
+<% content_for "header" do %>
+<title>Invidious</title>
+<% end %>
+
+<div>
+ <div id="post" class="comments post-comments">
+ <%= IV::Frontend::Comments.template_youtube(post_response.not_nil!, locale, thin_mode) %>
+ </div>
+
+ <% if nojs %>
+ <hr>
+ <% end %>
+ <br />
+
+ <div id="comments" class="comments post-comments">
+ <% if nojs %>
+ <%= comment_html %>
+ <% else %>
+ <noscript>
+ <a href="/post/<%= id %>?ucid=<%= ucid %>&nojs=1">
+ <%= translate(locale, "Hi! Looks like you have JavaScript turned off. Click here to view comments, keep in mind they may take a bit longer to load.") %>
+ </a>
+ </noscript>
+ <% end %>
+ </div>
+</div>
+
+<script id="video_data" type="application/json">
+<%=
+{
+ "id" => id,
+ "youtube_comments_text" => HTML.escape(translate(locale, "View YouTube comments")),
+ "reddit_comments_text" => "",
+ "reddit_permalink_text" => "",
+ "comments_text" => HTML.escape(translate(locale, "View `x` comments", "{commentCount}")),
+ "hide_replies_text" => HTML.escape(translate(locale, "Hide replies")),
+ "show_replies_text" => HTML.escape(translate(locale, "Show replies")),
+ "params" => {
+ "comments": ["youtube"]
+ },
+ "preferences" => prefs,
+ "base_url" => "/api/v1/post/#{URI.encode_www_form(id)}/comments",
+ "ucid" => ucid
+}.to_pretty_json
+%>
+</script>
+<script src="/js/comments.js?v=<%= ASSET_COMMIT %>"></script>
+<script src="/js/post.js?v=<%= ASSET_COMMIT %>"></script> \ No newline at end of file
diff --git a/src/invidious/views/privacy.ecr b/src/invidious/views/privacy.ecr
index 643f880b..bc5ff40b 100644
--- a/src/invidious/views/privacy.ecr
+++ b/src/invidious/views/privacy.ecr
@@ -16,12 +16,11 @@
<li>a list of channel UCIDs the user is subscribed to</li>
<li>a user ID (for persistent storage of subscriptions and preferences)</li>
<li>a json object containing user preferences</li>
- <li>a hashed password if applicable (not present on google accounts)</li>
+ <li>a hashed password</li>
<li>a randomly generated token for providing an RSS feed of a user's subscriptions</li>
<li>a list of video IDs identifying watched videos</li>
</ul>
<p>Users can clear their watch history using the <a href="/clear_watch_history">clear watch history</a> page.</p>
- <p>If a user is logged in with a Google account, no password will ever be stored. This website uses the session token provided by Google to identify a user, but does not store the information required to make requests on a user's behalf without their knowledge or consent.</p>
<h3>Data you passively provide</h3>
<p>When you request any resource from this website (for example: a page, a font, an image, or an API endpoint) information about the request may be logged.</p>
diff --git a/src/invidious/views/search.ecr b/src/invidious/views/search.ecr
index fefc9fbb..b1300214 100644
--- a/src/invidious/views/search.ecr
+++ b/src/invidious/views/search.ecr
@@ -1,136 +1,21 @@
<% content_for "header" do %>
-<title><%= search_query.not_nil!.size > 30 ? HTML.escape(query.not_nil![0,30].rstrip(".") + "...") : HTML.escape(query.not_nil!) %> - Invidious</title>
+<title><%= query.text.size > 30 ? HTML.escape(query.text[0,30].rstrip(".")) + "&hellip;" : HTML.escape(query.text) %> - Invidious</title>
+<link rel="stylesheet" href="/css/search.css?v=<%= ASSET_COMMIT %>">
<% end %>
-<details id="filters">
- <summary>
- <h3 style="display:inline"> <%= translate(locale, "filter") %> </h3>
- </summary>
- <div id="filters" class="pure-g h-box">
- <div class="pure-u-1-3 pure-u-md-1-5">
- <b><%= translate(locale, "date") %></b>
- <hr/>
- <% ["hour", "today", "week", "month", "year"].each do |date| %>
- <div class="pure-u-1 pure-md-1-5">
- <% if operator_hash.fetch("date", "all") == date %>
- <b><%= translate(locale, date) %></b>
- <% else %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!.gsub(/ ?date:[a-z]+/, "") + " date:" + date) %>&page=<%= page %>">
- <%= translate(locale, date) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- <div class="pure-u-1-3 pure-u-md-1-5">
- <b><%= translate(locale, "content_type") %></b>
- <hr/>
- <% ["video", "channel", "playlist", "movie", "show"].each do |content_type| %>
- <div class="pure-u-1 pure-md-1-5">
- <% if operator_hash.fetch("content_type", "all") == content_type %>
- <b><%= translate(locale, content_type) %></b>
- <% else %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!.gsub(/ ?content_type:[a-z]+/, "") + " content_type:" + content_type) %>&page=<%= page %>">
- <%= translate(locale, content_type) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- <div class="pure-u-1-3 pure-u-md-1-5">
- <b><%= translate(locale, "duration") %></b>
- <hr/>
- <% ["short", "long"].each do |duration| %>
- <div class="pure-u-1 pure-md-1-5">
- <% if operator_hash.fetch("duration", "all") == duration %>
- <b><%= translate(locale, duration) %></b>
- <% else %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!.gsub(/ ?duration:[a-z]+/, "") + " duration:" + duration) %>&page=<%= page %>">
- <%= translate(locale, duration) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- <div class="pure-u-1-3 pure-u-md-1-5">
- <b><%= translate(locale, "features") %></b>
- <hr/>
- <% ["hd", "subtitles", "creative_commons", "3d", "live", "purchased", "4k", "360", "location", "hdr"].each do |feature| %>
- <div class="pure-u-1 pure-md-1-5">
- <% if operator_hash.fetch("features", "all").includes?(feature) %>
- <b><%= translate(locale, feature) %></b>
- <% elsif operator_hash.has_key?("features") %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!.gsub(/features:/, "features:" + feature + ",")) %>&page=<%= page %>">
- <%= translate(locale, feature) %>
- </a>
- <% else %>
- <a href="/search?q=<%= HTML.escape(query.not_nil! + " features:" + feature) %>&page=<%= page %>">
- <%= translate(locale, feature) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- <div class="pure-u-1-3 pure-u-md-1-5">
- <b><%= translate(locale, "sort") %></b>
- <hr/>
- <% ["relevance", "rating", "date", "views"].each do |sort| %>
- <div class="pure-u-1 pure-md-1-5">
- <% if operator_hash.fetch("sort", "relevance") == sort %>
- <b><%= translate(locale, sort) %></b>
- <% else %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!.gsub(/ ?sort:[a-z]+/, "") + " sort:" + sort) %>&page=<%= page %>">
- <%= translate(locale, sort) %>
- </a>
- <% end %>
- </div>
- <% end %>
- </div>
- </div>
-</details>
-
+<!-- Search redirection and filtering UI -->
+<%= Invidious::Frontend::SearchFilters.generate(query.filters, query.text, query.page, locale) %>
<hr/>
-<div class="pure-g h-box v-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page - 1 %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if count >= 20 %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page + 1 %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
-
-<div class="pure-g">
- <% videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page - 1 %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if count >= 20 %>
- <a href="/search?q=<%= HTML.escape(query.not_nil!) %>&page=<%= page + 1 %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
+<%- if items.empty? -%>
+<div class="h-box no-results-error">
+ <div>
+ <%= translate(locale, "search_message_no_results") %><br/><br/>
+ <%= translate(locale, "search_message_change_filters_or_query") %><br/><br/>
+ <%= translate(locale, "search_message_use_another_instance", redirect_url) %>
</div>
</div>
+<%- else -%>
+ <%= rendered "components/items_paginated" %>
+<%- end -%>
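
The hand-rolled `<details>` filter block is replaced by a single call into `Invidious::Frontend::SearchFilters.generate`, so the view is reduced to the filter form, a no-results message, and the shared paginated item list. Only the call shape is visible here; a hedged stub of the interface it implies (parameter names and the `String` return type are assumptions drawn from the call site and its `<%= %>` interpolation):

```crystal
module Invidious::Frontend::SearchFilters
  extend self

  # Assumed interface, based only on the call in search.ecr:
  #   SearchFilters.generate(query.filters, query.text, query.page, locale)
  # Body omitted; it would build the search filtering UI markup.
  def generate(filters, query, page, locale) : String
    ""
  end
end
```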
diff --git a/src/invidious/views/search_homepage.ecr b/src/invidious/views/search_homepage.ecr
index 8927c3f1..2424a1cf 100644
--- a/src/invidious/views/search_homepage.ecr
+++ b/src/invidious/views/search_homepage.ecr
@@ -1,7 +1,7 @@
<% content_for "header" do %>
<meta name="description" content="<%= translate(locale, "An alternative front-end to YouTube") %>">
<title>
- Invidious
+ Invidious - <%= translate(locale, "search") %>
</title>
<link rel="stylesheet" href="/css/empty.css?v=<%= ASSET_COMMIT %>">
<% end %>
@@ -14,11 +14,7 @@
</div>
<div class="pure-u-1-4"></div>
<div class="pure-u-1 pure-u-md-12-24 searchbar">
- <form class="pure-form" action="/search" method="get">
- <fieldset>
- <input autofocus type="search" style="width:100%" name="q" placeholder="<%= translate(locale, "search") %>" value="<%= env.get?("search").try {|x| HTML.escape(x.as(String)) } %>">
- </fieldset>
- </form>
+ <% autofocus = true %><%= rendered "components/search_box" %>
</div>
<div class="pure-u-1-4"></div>
</div>
diff --git a/src/invidious/views/subscriptions.ecr b/src/invidious/views/subscriptions.ecr
deleted file mode 100644
index af1d4fbc..00000000
--- a/src/invidious/views/subscriptions.ecr
+++ /dev/null
@@ -1,81 +0,0 @@
-<% content_for "header" do %>
-<title><%= translate(locale, "Subscriptions") %> - Invidious</title>
-<link rel="alternate" type="application/rss+xml" title="RSS" href="/feed/private?token=<%= token %>" />
-<% end %>
-
-<%= rendered "components/feed_menu" %>
-
-<div class="pure-g h-box">
- <div class="pure-u-1-3">
- <h3>
- <a href="/subscription_manager"><%= translate(locale, "Manage subscriptions") %></a>
- </h3>
- </div>
- <div class="pure-u-1-3" style="text-align:center">
- <h3>
- <a href="/feed/history"><%= translate(locale, "Watch history") %></a>
- </h3>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/feed/private?token=<%= token %>"><i class="icon ion-logo-rss"></i></a>
- </h3>
- </div>
-</div>
-
-<center>
- <%= translate(locale, "`x` unseen notifications", "#{notifications.size}") %>
-</center>
-
-<% if !notifications.empty? %>
- <div class="h-box">
- <hr>
- </div>
-<% end %>
-
-<div class="pure-g">
- <% notifications.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-
-<div class="h-box">
- <hr>
-</div>
-
-<script id="watched_data" type="application/json">
-<%=
-{
- "csrf_token" => URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "")
-}.to_pretty_json
-%>
-</script>
-<script src="/js/watched_widget.js"></script>
-
-<div class="pure-g">
- <% videos.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-
-<div class="pure-g h-box">
- <div class="pure-u-1 pure-u-lg-1-5">
- <% if page > 1 %>
- <a href="/feed/subscriptions?page=<%= page - 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
- <%= translate(locale, "Previous page") %>
- </a>
- <% end %>
- </div>
- <div class="pure-u-1 pure-u-lg-3-5"></div>
- <div class="pure-u-1 pure-u-lg-1-5" style="text-align:right">
- <% if (videos.size + notifications.size) == max_results %>
- <a href="/feed/subscriptions?page=<%= page + 1 %><% if env.params.query["max_results"]? %>&max_results=<%= max_results %><% end %>">
- <%= translate(locale, "Next page") %>
- </a>
- <% end %>
- </div>
-</div>
diff --git a/src/invidious/views/template.ecr b/src/invidious/views/template.ecr
index 5b63bf1f..9904b4fc 100644
--- a/src/invidious/views/template.ecr
+++ b/src/invidious/views/template.ecr
@@ -1,5 +1,9 @@
+<%
+ locale = env.get("preferences").as(Preferences).locale
+ dark_mode = env.get("preferences").as(Preferences).dark_mode
+%>
<!DOCTYPE html>
-<html lang="<%= env.get("preferences").as(Preferences).locale %>">
+<html lang="<%= locale %>">
<head>
<meta charset="utf-8">
@@ -17,35 +21,29 @@
<link rel="stylesheet" href="/css/grids-responsive-min.css?v=<%= ASSET_COMMIT %>">
<link rel="stylesheet" href="/css/ionicons.min.css?v=<%= ASSET_COMMIT %>">
<link rel="stylesheet" href="/css/default.css?v=<%= ASSET_COMMIT %>">
+ <link rel="stylesheet" href="/css/carousel.css?v=<%= ASSET_COMMIT %>">
+ <script src="/js/_helpers.js?v=<%= ASSET_COMMIT %>"></script>
</head>
-<% locale = LOCALES[env.get("preferences").as(Preferences).locale]? %>
-<% dark_mode = env.get("preferences").as(Preferences).dark_mode %>
-
<body class="<%= dark_mode.blank? ? "no" : dark_mode %>-theme">
- <span style="display:none" id="dark_mode_pref"><%= env.get("preferences").as(Preferences).dark_mode %></span>
+ <span style="display:none" id="dark_mode_pref"><%= dark_mode %></span>
<div class="pure-g">
- <div class="pure-u-1 pure-u-md-2-24"></div>
- <div class="pure-u-1 pure-u-md-20-24", id="contents">
+ <div class="pure-u-1 pure-u-xl-20-24" id="contents">
<div class="pure-g navbar h-box">
<% if navbar_search %>
<div class="pure-u-1 pure-u-md-4-24">
<a href="/" class="index-link pure-menu-heading">Invidious</a>
</div>
<div class="pure-u-1 pure-u-md-12-24 searchbar">
- <form class="pure-form" action="/search" method="get">
- <fieldset>
- <input type="search" style="width:100%" name="q" placeholder="<%= translate(locale, "search") %>" value="<%= env.get?("search").try {|x| HTML.escape(x.as(String)) } %>">
- </fieldset>
- </form>
+ <% autofocus = false %><%= rendered "components/search_box" %>
</div>
<% end %>
<div class="pure-u-1 pure-u-md-8-24 user-field">
<% if env.get? "user" %>
<div class="pure-u-1-4">
- <a id="toggle_theme" href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
- <% if env.get("preferences").as(Preferences).dark_mode == "dark" %>
+ <a id="toggle_theme" href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading" title="<%= translate(locale, "toggle_theme") %>">
+ <% if dark_mode == "dark" %>
<i class="icon ion-ios-sunny"></i>
<% else %>
<i class="icon ion-ios-moon"></i>
@@ -54,8 +52,8 @@
</div>
<div class="pure-u-1-4">
<a id="notification_ticker" title="<%= translate(locale, "Subscriptions") %>" href="/feed/subscriptions" class="pure-menu-heading">
- <% notification_count = env.get("user").as(User).notifications.size %>
- <% if notification_count > 0 %>
+ <% notification_count = env.get("user").as(Invidious::User).notifications.size %>
+ <% if CONFIG.enable_user_notifications && notification_count > 0 %>
<span id="notification_count"><%= notification_count %></span> <i class="icon ion-ios-notifications"></i>
<% else %>
<i class="icon ion-ios-notifications-outline"></i>
@@ -67,9 +65,14 @@
<i class="icon ion-ios-cog"></i>
</a>
</div>
+ <% if env.get("preferences").as(Preferences).show_nick %>
+ <div class="pure-u-1-4" style="overflow: hidden; white-space: nowrap;">
+ <span id="user_name"><%= HTML.escape(env.get("user").as(Invidious::User).email) %></span>
+ </div>
+ <% end %>
<div class="pure-u-1-4">
<form action="/signout?referer=<%= env.get?("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
<a class="pure-menu-heading" href="#">
<input style="all:unset" type="submit" value="<%= translate(locale, "Log out") %>">
</a>
@@ -77,8 +80,8 @@
</div>
<% else %>
<div class="pure-u-1-3">
- <a id="toggle_theme" href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading">
- <% if env.get("preferences").as(Preferences).dark_mode == "dark" %>
+ <a id="toggle_theme" href="/toggle_theme?referer=<%= env.get?("current_page") %>" class="pure-menu-heading" title="<%= translate(locale, "toggle_theme") %>">
+ <% if dark_mode == "dark" %>
<i class="icon ion-ios-sunny"></i>
<% else %>
<i class="icon ion-ios-moon"></i>
@@ -112,40 +115,46 @@
<footer>
<div class="pure-g">
<div class="pure-u-1 pure-u-md-1-3">
- <a href="https://github.com/iv-org/invidious">
- <%= translate(locale, "Released under the AGPLv3 by Omar Roth.") %>
- </a>
- </div>
- <div class="pure-u-1 pure-u-md-1-3">
- <i class="icon ion-ios-wallet"></i>
- BTC: <a href="bitcoin:bc1qfhe7rq3lqzuayzjxzyt9waz9ytrs09kla3tsgr">bc1qfhe7rq3lqzuayzjxzyt9waz9ytrs09kla3tsgr</a>
- </div>
- <div class="pure-u-1 pure-u-md-1-3">
- <i class="icon ion-ios-wallet"></i>
- XMR: <a href="monero:41nMCtek197boJtiUvGnTFYMatrLEpnpkQDmUECqx5Es2uX3sTKKWVhSL76suXsG3LXqkEJBrCZBgPTwJrDp1FrZJfycGPR">Click here</a>
- </div>
- <div class="pure-u-1 pure-u-md-1-3">
- <a href="https://github.com/iv-org/documentation">Documentation</a>
+ <span>
+ <i class="icon ion-logo-github"></i>
+ <% if CONFIG.modified_source_code_url %>
+ <a href="https://github.com/iv-org/invidious"><%= translate(locale, "footer_original_source_code") %></a>&nbsp;/
+ <a href="<%= CONFIG.modified_source_code_url %>"><%= translate(locale, "footer_modfied_source_code") %></a>
+ <% else %>
+ <a href="https://github.com/iv-org/invidious"><%= translate(locale, "footer_source_code") %></a>
+ <% end %>
+ </span>
+ <span>
+ <i class="icon ion-ios-paper"></i>
+ <a href="https://github.com/iv-org/documentation"><%= translate(locale, "footer_documentation") %></a>
+ </span>
</div>
+
<div class="pure-u-1 pure-u-md-1-3">
- <i class="icon ion-logo-javascript"></i>
- <a rel="jslicense" href="/licenses">
- <%= translate(locale, "View JavaScript license information.") %>
- </a>
- /
- <i class="icon ion-ios-paper"></i>
- <a href="/privacy">
- <%= translate(locale, "View privacy policy.") %>
- </a>
+ <span>
+ <a href="https://github.com/iv-org/invidious/blob/master/LICENSE"><%= translate(locale, "Released under the AGPLv3 on Github.") %></a>
+ </span>
+ <span>
+ <i class="icon ion-logo-javascript"></i>
+ <a rel="jslicense" href="/licenses"><%= translate(locale, "View JavaScript license information.") %></a>
+ </span>
+ <span>
+ <i class="icon ion-ios-paper"></i>
+ <a href="/privacy"><%= translate(locale, "View privacy policy.") %></a>
+ </span>
</div>
+
<div class="pure-u-1 pure-u-md-1-3">
- <i class="icon ion-logo-github"></i>
- <%= translate(locale, "Current version: ") %> <%= CURRENT_VERSION %>-<%= CURRENT_COMMIT %> @ <%= CURRENT_BRANCH %>
+ <span>
+ <i class="icon ion-ios-wallet"></i>
+ <a href="https://invidious.io/donate/"><%= translate(locale, "footer_donate_page") %></a>
+ </span>
+ <span><%= translate(locale, "Current version: ") %> <%= CURRENT_VERSION %>-<%= CURRENT_COMMIT %> @ <%= CURRENT_BRANCH %></span>
</div>
</div>
</footer>
+
</div>
- <div class="pure-u-1 pure-u-md-2-24"></div>
</div>
<script src="/js/handlers.js?v=<%= ASSET_COMMIT %>"></script>
<script src="/js/themes.js?v=<%= ASSET_COMMIT %>"></script>
@@ -159,7 +168,9 @@
}.to_pretty_json
%>
</script>
+ <% if CONFIG.enable_user_notifications %>
<script src="/js/notifications.js?v=<%= ASSET_COMMIT %>"></script>
+ <% end %>
<% end %>
</body>
diff --git a/src/invidious/views/authorize_token.ecr b/src/invidious/views/user/authorize_token.ecr
index 8ea99010..725f392e 100644
--- a/src/invidious/views/authorize_token.ecr
+++ b/src/invidious/views/user/authorize_token.ecr
@@ -9,13 +9,13 @@
<%= translate(locale, "Token") %>
</h3>
</div>
- <div class="pure-u-1-3" style="text-align:center">
- <h3>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:center">
<a href="/token_manager"><%= translate(locale, "Token manager") %></a>
</h3>
</div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
<a href="/preferences"><%= translate(locale, "Preferences") %></a>
</h3>
</div>
@@ -72,7 +72,7 @@
<input type="hidden" name="expire" value="<%= expire %>">
<% end %>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</form>
</div>
<% end %>
diff --git a/src/invidious/views/change_password.ecr b/src/invidious/views/user/change_password.ecr
index fb558f1d..1b9eb82e 100644
--- a/src/invidious/views/change_password.ecr
+++ b/src/invidious/views/user/change_password.ecr
@@ -23,7 +23,7 @@
<%= translate(locale, "Change password") %>
</button>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</fieldset>
</form>
</div>
diff --git a/src/invidious/views/clear_watch_history.ecr b/src/invidious/views/user/clear_watch_history.ecr
index 5f9d1032..c9acbe44 100644
--- a/src/invidious/views/clear_watch_history.ecr
+++ b/src/invidious/views/user/clear_watch_history.ecr
@@ -19,6 +19,6 @@
</div>
</div>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</form>
</div>
diff --git a/src/invidious/views/data_control.ecr b/src/invidious/views/user/data_control.ecr
index 74ccc06c..9ce42c99 100644
--- a/src/invidious/views/data_control.ecr
+++ b/src/invidious/views/user/data_control.ecr
@@ -8,13 +8,13 @@
<legend><%= translate(locale, "Import") %></legend>
<div class="pure-control-group">
- <label for="import_youtube"><%= translate(locale, "Import Invidious data") %></label>
+ <label for="import_invidious"><%= translate(locale, "Import Invidious data") %></label>
<input type="file" id="import_invidious" name="import_invidious">
</div>
<div class="pure-control-group">
<label for="import_youtube">
- <a rel="noopener" target="_blank" href="https://github.com/iv-org/documentation/blob/master/Export-YouTube-subscriptions.md">
+ <a rel="noopener" target="_blank" href="https://github.com/iv-org/documentation/blob/master/docs/export-youtube-subscriptions.md">
<%= translate(locale, "Import YouTube subscriptions") %>
</a>
</label>
@@ -22,6 +22,16 @@
</div>
<div class="pure-control-group">
+ <label for="import_youtube_pl"><%= translate(locale, "Import YouTube playlist (.csv)") %></label>
+ <input type="file" id="import_youtube_pl" name="import_youtube_pl">
+ </div>
+
+ <div class="pure-control-group">
+ <label for="import_youtube_wh"><%= translate(locale, "Import YouTube watch history (.json)") %></label>
+ <input type="file" id="import_youtube_wh" name="import_youtube_wh">
+ </div>
+
+ <div class="pure-control-group">
<label for="import_freetube"><%= translate(locale, "Import FreeTube subscriptions (.db)") %></label>
<input type="file" id="import_freetube" name="import_freetube">
</div>
diff --git a/src/invidious/views/delete_account.ecr b/src/invidious/views/user/delete_account.ecr
index 9103d5b7..67351bbf 100644
--- a/src/invidious/views/delete_account.ecr
+++ b/src/invidious/views/user/delete_account.ecr
@@ -19,6 +19,6 @@
</div>
</div>
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(csrf_token) %>">
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(csrf_token) %>">
</form>
</div>
diff --git a/src/invidious/views/login.ecr b/src/invidious/views/user/login.ecr
index b6e8117b..2b03d280 100644
--- a/src/invidious/views/login.ecr
+++ b/src/invidious/views/user/login.ecr
@@ -6,63 +6,12 @@
<div class="pure-u-1 pure-u-lg-1-5"></div>
<div class="pure-u-1 pure-u-lg-3-5">
<div class="h-box">
- <div class="pure-g">
- <div class="pure-u-1-2">
- <a class="pure-button <% if account_type == "invidious" %>pure-button-disabled<% end %>" href="/login?type=invidious">
- <%= translate(locale, "Log in/register") %>
- </a>
- </div>
- <div class="pure-u-1-2">
- <a class="pure-button <% if account_type == "google" %>pure-button-disabled<% end %>" href="/login?type=google">
- <%= translate(locale, "Log in with Google") %>
- </a>
- </div>
- </div>
-
- <hr>
-
<% case account_type when %>
- <% when "google" %>
- <form class="pure-form pure-form-stacked" action="/login?referer=<%= URI.encode_www_form(referer) %>&type=google" method="post">
- <fieldset>
- <% if email %>
- <input name="email" type="hidden" value="<%= email %>">
- <% else %>
- <label for="email"><%= translate(locale, "E-mail") %> :</label>
- <input required class="pure-input-1" name="email" type="email" placeholder="<%= translate(locale, "E-mail") %>">
- <% end %>
-
- <% if password %>
- <input name="password" type="hidden" value="<%= HTML.escape(password) %>">
- <% else %>
- <label for="password"><%= translate(locale, "Password") %> :</label>
- <input required class="pure-input-1" name="password" type="password" placeholder="<%= translate(locale, "Password") %>">
- <% end %>
-
- <% if prompt %>
- <label for="tfa"><%= translate(locale, prompt) %> :</label>
- <input required class="pure-input-1" name="tfa" type="text" placeholder="<%= translate(locale, prompt) %>">
- <% end %>
-
- <% if tfa %>
- <input type="hidden" name="tfa" value="<%= tfa %>">
- <% end %>
-
- <% if captcha %>
- <img style="width:50%" src="/Captcha?v=2&ctoken=<%= captcha[:tokens][0] %>"/>
- <input type="hidden" name="token" value="<%= captcha[:tokens][0] %>">
- <label for="answer"><%= translate(locale, "Answer") %> :</label>
- <input type="text" name="answer" type="text" placeholder="<%= translate(locale, "Answer") %>">
- <% end %>
-
- <button type="submit" class="pure-button pure-button-primary"><%= translate(locale, "Sign In") %></button>
- </fieldset>
- </form>
<% else # "invidious" %>
<form class="pure-form pure-form-stacked" action="/login?referer=<%= URI.encode_www_form(referer) %>&type=invidious" method="post">
<fieldset>
<% if email %>
- <input name="email" type="hidden" value="<%= email %>">
+ <input name="email" type="hidden" value="<%= HTML.escape(email) %>">
<% else %>
<label for="email"><%= translate(locale, "User ID") %> :</label>
<input required class="pure-input-1" name="email" type="text" placeholder="<%= translate(locale, "User ID") %>">
@@ -81,7 +30,7 @@
<% captcha = captcha.not_nil! %>
<img style="width:50%" src='<%= captcha[:question] %>'/>
<% captcha[:tokens].each_with_index do |token, i| %>
- <input type="hidden" name="token[<%= i %>]" value="<%= URI.encode_www_form(token) %>">
+ <input type="hidden" name="token[<%= i %>]" value="<%= HTML.escape(token) %>">
<% end %>
<input type="hidden" name="captcha_type" value="image">
<label for="answer"><%= translate(locale, "Time (h:mm:ss):") %></label>
@@ -89,7 +38,7 @@
<% else # "text" %>
<% captcha = captcha.not_nil! %>
<% captcha[:tokens].each_with_index do |token, i| %>
- <input type="hidden" name="token[<%= i %>]" value="<%= URI.encode_www_form(token) %>">
+ <input type="hidden" name="token[<%= i %>]" value="<%= HTML.escape(token) %>">
<% end %>
<input type="hidden" name="captcha_type" value="text">
<label for="answer"><%= captcha[:question] %></label>
diff --git a/src/invidious/views/preferences.ecr b/src/invidious/views/user/preferences.ecr
index 602340a4..cf8b5593 100644
--- a/src/invidious/views/preferences.ecr
+++ b/src/invidious/views/user/preferences.ecr
@@ -5,40 +5,45 @@
<div class="h-box">
<form class="pure-form pure-form-aligned" action="/preferences?referer=<%= URI.encode_www_form(referer) %>" method="post">
<fieldset>
- <legend><%= translate(locale, "Player preferences") %></legend>
+ <legend><%= translate(locale, "preferences_category_player") %></legend>
<div class="pure-control-group">
- <label for="video_loop"><%= translate(locale, "Always loop: ") %></label>
+ <label for="video_loop"><%= translate(locale, "preferences_video_loop_label") %></label>
<input name="video_loop" id="video_loop" type="checkbox" <% if preferences.video_loop %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="autoplay"><%= translate(locale, "Autoplay: ") %></label>
+ <label for="preload"><%= translate(locale, "preferences_preload_label") %></label>
+ <input name="preload" id="preload" type="checkbox" <% if preferences.preload %>checked<% end %>>
+ </div>
+
+ <div class="pure-control-group">
+ <label for="autoplay"><%= translate(locale, "preferences_autoplay_label") %></label>
<input name="autoplay" id="autoplay" type="checkbox" <% if preferences.autoplay %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="continue"><%= translate(locale, "Play next by default: ") %></label>
+ <label for="continue"><%= translate(locale, "preferences_continue_label") %></label>
<input name="continue" id="continue" type="checkbox" <% if preferences.continue %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="continue_autoplay"><%= translate(locale, "Autoplay next video: ") %></label>
+ <label for="continue_autoplay"><%= translate(locale, "preferences_continue_autoplay_label") %></label>
<input name="continue_autoplay" id="continue_autoplay" type="checkbox" <% if preferences.continue_autoplay %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="local"><%= translate(locale, "Proxy videos: ") %></label>
+ <label for="local"><%= translate(locale, "preferences_local_label") %></label>
<input name="local" id="local" type="checkbox" <% if preferences.local && !CONFIG.disabled?("local") %>checked<% end %> <% if CONFIG.disabled?("local") %>disabled<% end %>>
</div>
<div class="pure-control-group">
- <label for="listen"><%= translate(locale, "Listen by default: ") %></label>
+ <label for="listen"><%= translate(locale, "preferences_listen_label") %></label>
<input name="listen" id="listen" type="checkbox" <% if preferences.listen %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="speed"><%= translate(locale, "Default speed: ") %></label>
+ <label for="speed"><%= translate(locale, "preferences_speed_label") %></label>
<select name="speed" id="speed">
<% {2.0, 1.75, 1.5, 1.25, 1.0, 0.75, 0.5, 0.25}.each do |option| %>
<option <% if preferences.speed == option %> selected <% end %>><%= option %></option>
@@ -47,11 +52,11 @@
</div>
<div class="pure-control-group">
- <label for="quality"><%= translate(locale, "Preferred video quality: ") %></label>
+ <label for="quality"><%= translate(locale, "preferences_quality_label") %></label>
<select name="quality" id="quality">
<% {"dash", "hd720", "medium", "small"}.each do |option| %>
<% if !(option == "dash" && CONFIG.disabled?("dash")) %>
- <option value="<%= option %>" <% if preferences.quality == option %> selected <% end %>><%= translate(locale, option) %></option>
+ <option value="<%= option %>" <% if preferences.quality == option %> selected <% end %>><%= translate(locale, "preferences_quality_option_" + option) %></option>
<% end %>
<% end %>
</select>
@@ -59,23 +64,23 @@
<% if !CONFIG.disabled?("dash") %>
<div class="pure-control-group">
- <label for="quality_dash"><%= translate(locale, "Preferred dash video quality: ") %></label>
+ <label for="quality_dash"><%= translate(locale, "preferences_quality_dash_label") %></label>
<select name="quality_dash" id="quality_dash">
<% {"auto", "best", "4320p", "2160p", "1440p", "1080p", "720p", "480p", "360p", "240p", "144p", "worst"}.each do |option| %>
- <option value="<%= option %>" <% if preferences.quality_dash == option %> selected <% end %>><%= translate(locale, option) %></option>
+ <option value="<%= option %>" <% if preferences.quality_dash == option %> selected <% end %>><%= translate(locale, "preferences_quality_dash_option_" + option) %></option>
<% end %>
</select>
</div>
<% end %>
<div class="pure-control-group">
- <label for="volume"><%= translate(locale, "Player volume: ") %></label>
+ <label for="volume"><%= translate(locale, "preferences_volume_label") %></label>
<input name="volume" id="volume" data-onrange="update_volume_value" type="range" min="0" max="100" step="5" value="<%= preferences.volume %>">
<span class="pure-form-message-inline" id="volume-value"><%= preferences.volume %></span>
</div>
<div class="pure-control-group">
- <label for="comments[0]"><%= translate(locale, "Default comments: ") %></label>
+ <label for="comments[0]"><%= translate(locale, "preferences_comments_label") %></label>
<% preferences.comments.each_with_index do |comments, index| %>
<select name="comments[<%= index %>]" id="comments[<%= index %>]">
<% {"", "youtube", "reddit"}.each do |option| %>
@@ -86,10 +91,10 @@
</div>
<div class="pure-control-group">
- <label for="captions[0]"><%= translate(locale, "Default captions: ") %></label>
+ <label for="captions[0]"><%= translate(locale, "preferences_captions_label") %></label>
<% preferences.captions.each_with_index do |caption, index| %>
<select class="pure-u-1-6" name="captions[<%= index %>]" id="captions[<%= index %>]">
- <% CAPTION_LANGUAGES.each do |option| %>
+ <% Invidious::Videos::Captions::LANGUAGES.each do |option| %>
<option value="<%= option %>" <% if preferences.captions[index] == option %> selected <% end %>><%= translate(locale, option.blank? ? "none" : option) %></option>
<% end %>
</select>
@@ -97,34 +102,52 @@
</div>
<div class="pure-control-group">
- <label for="related_videos"><%= translate(locale, "Show related videos: ") %></label>
+ <label for="related_videos"><%= translate(locale, "preferences_related_videos_label") %></label>
<input name="related_videos" id="related_videos" type="checkbox" <% if preferences.related_videos %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="annotations"><%= translate(locale, "Show annotations by default: ") %></label>
+ <label for="annotations"><%= translate(locale, "preferences_annotations_label") %></label>
<input name="annotations" id="annotations" type="checkbox" <% if preferences.annotations %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="extend_desc"><%= translate(locale, "Automatically extend video description: ") %></label>
+ <label for="extend_desc"><%= translate(locale, "preferences_extend_desc_label") %></label>
<input name="extend_desc" id="extend_desc" type="checkbox" <% if preferences.extend_desc %>checked<% end %>>
</div>
+ <div class="pure-control-group">
+ <label for="vr_mode"><%= translate(locale, "preferences_vr_mode_label") %></label>
+ <input name="vr_mode" id="vr_mode" type="checkbox" <% if preferences.vr_mode %>checked<% end %>>
+ </div>
+
+ <div class="pure-control-group">
+ <label for="save_player_pos"><%= translate(locale, "preferences_save_player_pos_label") %></label>
+ <input name="save_player_pos" id="save_player_pos" type="checkbox" <% if preferences.save_player_pos %>checked<% end %>>
+ </div>
- <legend><%= translate(locale, "Visual preferences") %></legend>
+ <legend><%= translate(locale, "preferences_category_visual") %></legend>
<div class="pure-control-group">
- <label for="locale"><%= translate(locale, "Language: ") %></label>
+ <label for="locale"><%= translate(locale, "preferences_locale_label") %></label>
<select name="locale" id="locale">
- <% LOCALES.each_key do |option| %>
- <option value="<%= option %>" <% if preferences.locale == option %> selected <% end %>><%= option %></option>
+ <% LOCALES_LIST.each do |iso_name, full_name| %>
+ <option value="<%= iso_name %>" <% if preferences.locale == iso_name %> selected <% end %>><%= HTML.escape(full_name) %></option>
<% end %>
</select>
</div>
<div class="pure-control-group">
- <label for="player_style"><%= translate(locale, "Player style: ") %></label>
+ <label for="region"><%= translate(locale, "preferences_region_label") %></label>
+ <select name="region" id="region">
+ <% CONTENT_REGIONS.each do |option| %>
+ <option value="<%= option %>" <% if preferences.region == option %> selected <% end %>><%= option %></option>
+ <% end %>
+ </select>
+ </div>
+
+ <div class="pure-control-group">
+ <label for="player_style"><%= translate(locale, "preferences_player_style_label") %></label>
<select name="player_style" id="player_style">
<% {"invidious", "youtube"}.each do |option| %>
<option value="<%= option %>" <% if preferences.player_style == option %> selected <% end %>><%= translate(locale, option) %></option>
@@ -133,7 +156,7 @@
</div>
<div class="pure-control-group">
- <label for="dark_mode"><%= translate(locale, "Theme: ") %></label>
+ <label for="dark_mode"><%= translate(locale, "preferences_dark_mode_label") %></label>
<select name="dark_mode" id="dark_mode">
<% {"", "light", "dark"}.each do |option| %>
<option value="<%= option %>" <% if preferences.dark_mode == option %> selected <% end %>><%= translate(locale, option.blank? ? "auto" : option) %></option>
@@ -142,7 +165,7 @@
</div>
<div class="pure-control-group">
- <label for="thin_mode"><%= translate(locale, "Thin mode: ") %></label>
+ <label for="thin_mode"><%= translate(locale, "preferences_thin_mode_label") %></label>
<input name="thin_mode" id="thin_mode" type="checkbox" <% if preferences.thin_mode %>checked<% end %>>
</div>
@@ -153,7 +176,7 @@
<% end %>
<div class="pure-control-group">
- <label for="default_home"><%= translate(locale, "Default homepage: ") %></label>
+ <label for="default_home"><%= translate(locale, "preferences_default_home_label") %></label>
<select name="default_home" id="default_home">
<% feed_options.each do |option| %>
<option value="<%= option %>" <% if preferences.default_home == option %> selected <% end %>><%= translate(locale, option.blank? ? "Search" : option) %></option>
@@ -162,7 +185,7 @@
</div>
<div class="pure-control-group">
- <label for="feed_menu"><%= translate(locale, "Feed menu: ") %></label>
+ <label for="feed_menu"><%= translate(locale, "preferences_feed_menu_label") %></label>
<% (feed_options.size - 1).times do |index| %>
<select name="feed_menu[<%= index %>]" id="feed_menu[<%= index %>]">
<% feed_options.each do |option| %>
@@ -171,22 +194,40 @@
</select>
<% end %>
</div>
+ <% if env.get? "user" %>
+ <div class="pure-control-group">
+ <label for="show_nick"><%= translate(locale, "preferences_show_nick_label") %></label>
+ <input name="show_nick" id="show_nick" type="checkbox" <% if preferences.show_nick %>checked<% end %>>
+ </div>
+ <% end %>
+
+ <legend><%= translate(locale, "preferences_category_misc") %></legend>
+
+ <div class="pure-control-group">
+ <label for="automatic_instance_redirect"><%= translate(locale, "preferences_automatic_instance_redirect_label") %></label>
+ <input name="automatic_instance_redirect" id="automatic_instance_redirect" type="checkbox" <% if preferences.automatic_instance_redirect %>checked<% end %>>
+ </div>
<% if env.get? "user" %>
- <legend><%= translate(locale, "Subscription preferences") %></legend>
+ <legend><%= translate(locale, "preferences_category_subscription") %></legend>
<div class="pure-control-group">
- <label for="annotations_subscribed"><%= translate(locale, "Show annotations by default for subscribed channels: ") %></label>
+ <label for="watch_history"><%= translate(locale, "preferences_watch_history_label") %></label>
+ <input name="watch_history" id="watch_history" type="checkbox" <% if preferences.watch_history %>checked<% end %>>
+ </div>
+
+ <div class="pure-control-group">
+ <label for="annotations_subscribed"><%= translate(locale, "preferences_annotations_subscribed_label") %></label>
<input name="annotations_subscribed" id="annotations_subscribed" type="checkbox" <% if preferences.annotations_subscribed %>checked<% end %>>
</div>
<div class="pure-control-group">
- <label for="max_results"><%= translate(locale, "Number of videos shown in feed: ") %></label>
+ <label for="max_results"><%= translate(locale, "preferences_max_results_label") %></label>
<input name="max_results" id="max_results" type="number" value="<%= preferences.max_results %>">
</div>
<div class="pure-control-group">
- <label for="sort"><%= translate(locale, "Sort videos by: ") %></label>
+ <label for="sort"><%= translate(locale, "preferences_sort_label") %></label>
<select name="sort" id="sort">
<% {"published", "published - reverse", "alphabetically", "alphabetically - reverse", "channel name", "channel name - reverse"}.each do |option| %>
<option value="<%= option %>" <% if preferences.sort == option %> selected <% end %>><%= translate(locale, option) %></option>
@@ -204,12 +245,13 @@
</div>
<div class="pure-control-group">
- <label for="unseen_only"><%= translate(locale, "Only show unwatched: ") %></label>
+ <label for="unseen_only"><%= translate(locale, "preferences_unseen_only_label") %></label>
<input name="unseen_only" id="unseen_only" type="checkbox" <% if preferences.unseen_only %>checked<% end %>>
</div>
+ <% if CONFIG.enable_user_notifications %>
<div class="pure-control-group">
- <label for="notifications_only"><%= translate(locale, "Only show notifications (if there are any): ") %></label>
+ <label for="notifications_only"><%= translate(locale, "preferences_notifications_only_label") %></label>
<input name="notifications_only" id="notifications_only" type="checkbox" <% if preferences.notifications_only %>checked<% end %>>
</div>
@@ -219,13 +261,14 @@
<a href="#" data-onclick="notification_requestPermission"><%= translate(locale, "Enable web notifications") %></a>
</div>
<% end %>
+ <% end %>
<% end %>
- <% if env.get?("user") && CONFIG.admins.includes? env.get?("user").as(User).email %>
- <legend><%= translate(locale, "Administrator preferences") %></legend>
+ <% if env.get?("user") && CONFIG.admins.includes? env.get?("user").as(Invidious::User).email %>
+ <legend><%= translate(locale, "preferences_category_admin") %></legend>
<div class="pure-control-group">
- <label for="admin_default_home"><%= translate(locale, "Default homepage: ") %></label>
+ <label for="admin_default_home"><%= translate(locale, "preferences_default_home_label") %></label>
<select name="admin_default_home" id="admin_default_home">
<% feed_options.each do |option| %>
<option value="<%= option %>" <% if CONFIG.default_user_preferences.default_home == option %> selected <% end %>><%= translate(locale, option.blank? ? "none" : option) %></option>
@@ -234,7 +277,7 @@
</div>
<div class="pure-control-group">
- <label for="admin_feed_menu"><%= translate(locale, "Feed menu: ") %></label>
+ <label for="admin_feed_menu"><%= translate(locale, "preferences_feed_menu_label") %></label>
<% (feed_options.size - 1).times do |index| %>
<select name="admin_feed_menu[<%= index %>]" id="admin_feed_menu[<%= index %>]">
<% feed_options.each do |option| %>
@@ -269,10 +312,15 @@
<label for="statistics_enabled"><%= translate(locale, "Report statistics: ") %></label>
<input name="statistics_enabled" id="statistics_enabled" type="checkbox" <% if CONFIG.statistics_enabled %>checked<% end %>>
</div>
+
+ <div class="pure-control-group">
+ <label for="modified_source_code_url"><%= translate(locale, "adminprefs_modified_source_code_url_label") %></label>
+ <input name="modified_source_code_url" id="modified_source_code_url" type="url" value="<%= CONFIG.modified_source_code_url %>">
+ </div>
<% end %>
<% if env.get? "user" %>
- <legend><%= translate(locale, "Data preferences") %></legend>
+ <legend><%= translate(locale, "preferences_category_data") %></legend>
<div class="pure-control-group">
<a href="/clear_watch_history?referer=<%= URI.encode_www_form(referer) %>"><%= translate(locale, "Clear watch history") %></a>
@@ -295,7 +343,7 @@
</div>
<div class="pure-control-group">
- <a href="/view_all_playlists"><%= translate(locale, "View all playlists") %></a>
+ <a href="/feed/playlists"><%= translate(locale, "View all playlists") %></a>
</div>
<div class="pure-control-group">
diff --git a/src/invidious/views/subscription_manager.ecr b/src/invidious/views/user/subscription_manager.ecr
index 6cddcd6c..c9801f09 100644
--- a/src/invidious/views/subscription_manager.ecr
+++ b/src/invidious/views/user/subscription_manager.ecr
@@ -6,19 +6,19 @@
<div class="pure-u-1-3">
<h3>
<a href="/feed/subscriptions">
- <%= translate(locale, "`x` subscriptions", %(<span id="count">#{subscriptions.size}</span>)) %>
+ <%= translate_count(locale, "generic_subscriptions_count", subscriptions.size, NumberFormatting::HtmlSpan) %>
</a>
</h3>
</div>
- <div class="pure-u-1-3" style="text-align:center">
- <h3>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:center">
<a href="/feed/history">
<%= translate(locale, "Watch history") %>
</a>
</h3>
</div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
<a href="/data_control?referer=<%= URI.encode_www_form(referer) %>">
<%= translate(locale, "Import/export") %>
</a>
@@ -31,17 +31,15 @@
<div class="pure-g<% if channel.deleted %> deleted <% end %>">
<div class="pure-u-2-5">
<h3 style="padding-left:0.5em">
- <a href="/channel/<%= channel.id %>"><%= channel.author %></a>
+ <a href="/channel/<%= channel.id %>"><%= HTML.escape(channel.author) %></a>
</h3>
</div>
<div class="pure-u-2-5"></div>
<div class="pure-u-1-5" style="text-align:right">
<h3 style="padding-right:0.5em">
<form data-onsubmit="return_false" action="/subscription_ajax?action_remove_subscriptions=1&c=<%= channel.id %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
- <a data-onclick="remove_subscription" data-ucid="<%= channel.id %>" href="#">
- <input style="all:unset" type="submit" value="<%= translate(locale, "unsubscribe") %>">
- </a>
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
+ <input style="all:unset" type="submit" data-onclick="remove_subscription" data-ucid="<%= channel.id %>" value="<%= translate(locale, "unsubscribe") %>">
</form>
</h3>
</div>
diff --git a/src/invidious/views/token_manager.ecr b/src/invidious/views/user/token_manager.ecr
index e48aec2f..a73fa048 100644
--- a/src/invidious/views/token_manager.ecr
+++ b/src/invidious/views/user/token_manager.ecr
@@ -5,7 +5,7 @@
<div class="pure-g h-box">
<div class="pure-u-1-3">
<h3>
- <%= translate(locale, "`x` tokens", %(<span id="count">#{tokens.size}</span>)) %>
+ <%= translate_count(locale, "tokens_count", tokens.size, NumberFormatting::HtmlSpan) %>
</h3>
</div>
<div class="pure-u-1-3"></div>
@@ -30,10 +30,8 @@
<div class="pure-u-1-5" style="text-align:right">
<h3 style="padding-right:0.5em">
<form data-onsubmit="return_false" action="/token_ajax?action_revoke_token=1&session=<%= token[:session] %>&referer=<%= env.get("current_page") %>" method="post">
- <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
- <a data-onclick="revoke_token" data-session="<%= token[:session] %>" href="#">
- <input style="all:unset" type="submit" value="<%= translate(locale, "revoke") %>">
- </a>
+ <input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
+ <input style="all:unset" type="submit" data-onclick="revoke_token" data-session="<%= token[:session] %>" value="<%= translate(locale, "revoke") %>">
</form>
</h3>
</div>
diff --git a/src/invidious/views/view_all_playlists.ecr b/src/invidious/views/view_all_playlists.ecr
deleted file mode 100644
index 5ec6aa31..00000000
--- a/src/invidious/views/view_all_playlists.ecr
+++ /dev/null
@@ -1,38 +0,0 @@
-<% content_for "header" do %>
-<title><%= translate(locale, "Playlists") %> - Invidious</title>
-<% end %>
-
-<%= rendered "components/feed_menu" %>
-
-<div class="pure-g h-box">
- <div class="pure-u-2-3">
- <h3><%= translate(locale, "`x` created playlists", %(<span id="count">#{items_created.size}</span>)) %></h3>
- </div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/create_playlist?referer=<%= URI.encode_www_form(referer) %>"><%= translate(locale, "Create playlist") %></a>
- </h3>
- </div>
-</div>
-
-<div class="pure-g">
- <% items_created.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
-
-<div class="pure-g h-box">
- <div class="pure-u-1">
- <h3><%= translate(locale, "`x` saved playlists", %(<span id="count">#{items_saved.size}</span>)) %></h3>
- </div>
-</div>
-
-<div class="pure-g">
- <% items_saved.each_slice(4) do |slice| %>
- <% slice.each do |item| %>
- <%= rendered "components/item" %>
- <% end %>
- <% end %>
-</div>
diff --git a/src/invidious/views/watch.ecr b/src/invidious/views/watch.ecr
index 8b587eb3..45c58a16 100644
--- a/src/invidious/views/watch.ecr
+++ b/src/invidious/views/watch.ecr
@@ -1,12 +1,17 @@
+<% ucid = video.ucid %>
+<% title = HTML.escape(video.title) %>
+<% author = HTML.escape(video.author) %>
+
+
<% content_for "header" do %>
<meta name="thumbnail" content="<%= thumbnail %>">
<meta name="description" content="<%= HTML.escape(video.short_description) %>">
<meta name="keywords" content="<%= video.keywords.join(",") %>">
-<meta property="og:site_name" content="Invidious">
+<meta property="og:site_name" content="<%= author %> | Invidious">
<meta property="og:url" content="<%= HOST_URL %>/watch?v=<%= video.id %>">
-<meta property="og:title" content="<%= HTML.escape(video.title) %>">
-<meta property="og:image" content="/vi/<%= video.id %>/maxres.jpg">
-<meta property="og:description" content="<%= video.short_description %>">
+<meta property="og:title" content="<%= title %>">
+<meta property="og:image" content="<%= HOST_URL %>/vi/<%= video.id %>/maxres.jpg">
+<meta property="og:description" content="<%= HTML.escape(video.short_description) %>">
<meta property="og:type" content="video.other">
<meta property="og:video:url" content="<%= HOST_URL %>/embed/<%= video.id %>">
<meta property="og:video:secure_url" content="<%= HOST_URL %>/embed/<%= video.id %>">
@@ -14,27 +19,26 @@
<meta property="og:video:width" content="1280">
<meta property="og:video:height" content="720">
<meta name="twitter:card" content="player">
-<meta name="twitter:site" content="@omarroth1">
<meta name="twitter:url" content="<%= HOST_URL %>/watch?v=<%= video.id %>">
-<meta name="twitter:title" content="<%= HTML.escape(video.title) %>">
-<meta name="twitter:description" content="<%= video.short_description %>">
+<meta name="twitter:title" content="<%= title %>">
+<meta name="twitter:description" content="<%= HTML.escape(video.short_description) %>">
<meta name="twitter:image" content="<%= HOST_URL %>/vi/<%= video.id %>/maxres.jpg">
<meta name="twitter:player" content="<%= HOST_URL %>/embed/<%= video.id %>">
<meta name="twitter:player:width" content="1280">
<meta name="twitter:player:height" content="720">
<link rel="alternate" href="https://www.youtube.com/watch?v=<%= video.id %>">
<%= rendered "components/player_sources" %>
-<title><%= HTML.escape(video.title) %> - Invidious</title>
+<title><%= title %> - Invidious</title>
<!-- Description expansion also updates the 'Show more' button to 'Show less' so
we're going to need to do it here in order to allow for translations.
-->
<style>
-#descexpansionbutton + label > a::after {
+#descexpansionbutton ~ label > a::after {
content: "<%= translate(locale, "Show more") %>"
}
-#descexpansionbutton:checked + label > a::after {
+#descexpansionbutton:checked ~ label > a::after {
content: "<%= translate(locale, "Show less") %>"
}
</style>
@@ -57,7 +61,11 @@ we're going to need to do it here in order to allow for translations.
"show_replies_text" => HTML.escape(translate(locale, "Show replies")),
"params" => params,
"preferences" => preferences,
- "premiere_timestamp" => video.premiere_timestamp.try &.to_unix
+ "premiere_timestamp" => video.premiere_timestamp.try &.to_unix,
+ "vr" => video.vr?,
+ "projection_type" => video.projection_type,
+ "local_disabled" => CONFIG.disabled?("local"),
+ "support_reddit" => true
}.to_pretty_json
%>
</script>
@@ -68,7 +76,7 @@ we're going to need to do it here in order to allow for translations.
<div class="h-box">
<h1>
- <%= HTML.escape(video.title) %>
+ <%= title %>
<% if params.listen %>
<a title="<%=translate(locale, "Video mode")%>" href="/watch?<%= env.params.query %>&listen=0">
<i class="icon ion-ios-videocam"></i>
@@ -96,7 +104,7 @@ we're going to need to do it here in order to allow for translations.
</h3>
<% elsif video.live_now %>
<h3>
- <%= video.premiere_timestamp.try { |t| translate(locale, "Started streaming `x` ago", recode_date((Time.utc - t).ago, locale)) } %>
+ <%= video.premiere_timestamp.try { |t| translate(locale, "videoinfo_started_streaming_x_ago", recode_date((Time.utc - t).ago, locale)) } %>
</h3>
<% end %>
</div>
@@ -105,12 +113,36 @@ we're going to need to do it here in order to allow for translations.
<div class="pure-u-1 pure-u-lg-1-5">
<div class="h-box">
<span id="watch-on-youtube">
- <a href="https://www.youtube.com/watch?v=<%= video.id %>"><%= translate(locale, "Watch on YouTube") %></a>
- (<a href="https://www.youtube.com/embed/<%= video.id %>"><%= translate(locale, "Embed") %></a>)
+ <%-
+ link_yt_watch = URI.new(scheme: "https", host: "www.youtube.com", path: "/watch", query: "v=#{video.id}")
+ link_yt_embed = URI.new(scheme: "https", host: "www.youtube.com", path: "/embed/#{video.id}")
+
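+ # When watching as part of a playlist, carry the playlist ID and index over to the YouTube links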
+ if !plid.nil? && !continuation.nil?
+ link_yt_param = URI::Params{"list" => [plid], "index" => [continuation.to_s]}
+ link_yt_watch = IV::HttpServer::Utils.add_params_to_url(link_yt_watch, link_yt_param)
+ link_yt_embed = IV::HttpServer::Utils.add_params_to_url(link_yt_embed, link_yt_param)
+ end
+ -%>
+ <a id="link-yt-watch" rel="noreferrer noopener" data-base-url="<%= link_yt_watch %>" href="<%= link_yt_watch %>"><%= translate(locale, "videoinfo_watch_on_youTube") %></a>
+ (<a id="link-yt-embed" rel="noreferrer noopener" data-base-url="<%= link_yt_embed %>" href="<%= link_yt_embed %>"><%= translate(locale, "videoinfo_youTube_embed_link") %></a>)
</span>
+
+ <p id="watch-on-another-invidious-instance">
+ <%- link_iv_other = IV::Frontend::Misc.redirect_url(env) -%>
+ <a id="link-iv-other" data-base-url="<%= link_iv_other %>" href="<%= link_iv_other %>"><%= translate(locale, "Switch Invidious Instance") %></a>
+ </p>
+
<p id="embed-link">
- <a href="<%= embed_link %>"><%= translate(locale, "Embed Link") %></a>
+ <%-
+ params_iv_embed = env.params.query.dup
+ params_iv_embed.delete_all("v")
+
+ link_iv_embed = URI.new(path: "/embed/#{id}")
+ link_iv_embed = IV::HttpServer::Utils.add_params_to_url(link_iv_embed, params_iv_embed)
+ -%>
+ <a id="link-iv-embed" data-base-url="<%= link_iv_embed %>" href="<%= link_iv_embed %>"><%= translate(locale, "videoinfo_invidious_embed_link") %></a>
</p>
+
<p id="annotations">
<% if params.annotations %>
<a href="/watch?<%= env.params.query %>&iv_load_policy=3">
@@ -124,18 +156,21 @@ we're going to need to do it here in order to allow for translations.
</p>
<% if user %>
- <% playlists = PG_DB.query_all("SELECT id,title FROM playlists WHERE author = $1 AND id LIKE 'IV%'", user.email, as: {String, String}) %>
+ <% playlists = Invidious::Database::Playlists.select_user_created_playlists(user.email) %>
<% if !playlists.empty? %>
- <form data-onsubmit="return_false" class="pure-form pure-form-stacked" action="/playlist_ajax" method="post">
+ <form data-onsubmit="return_false" class="pure-form pure-form-stacked" action="/playlist_ajax" method="post" target="_blank">
<div class="pure-control-group">
<label for="playlist_id"><%= translate(locale, "Add to playlist: ") %></label>
<select style="width:100%" name="playlist_id" id="playlist_id">
- <% playlists.each do |plid, title| %>
- <option data-plid="<%= plid %>" value="<%= plid %>"><%= title %></option>
+ <% playlists.each do |plid, playlist_title| %>
+ <option data-plid="<%= plid %>" value="<%= plid %>"><%= HTML.escape(playlist_title) %></option>
<% end %>
</select>
</div>
+ <input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
+ <input type="hidden" name="action_add_video" value="1">
+ <input type="hidden" name="video_id" value="<%= video.id %>">
<button data-onclick="add_playlist_video" data-id="<%= video.id %>" type="submit" class="pure-button pure-button-primary">
<b><%= translate(locale, "Add to playlist") %></b>
</button>
@@ -151,45 +186,11 @@ we're going to need to do it here in order to allow for translations.
<% end %>
<% end %>
- <% if CONFIG.dmca_content.includes?(video.id) || CONFIG.disabled?("downloads") %>
- <p id="download"><%= translate(locale, "Download is disabled.") %></p>
- <% else %>
- <form class="pure-form pure-form-stacked" action="/latest_version" method="get" rel="noopener" target="_blank">
- <div class="pure-control-group">
- <label for="download_widget"><%= translate(locale, "Download as: ") %></label>
- <select style="width:100%" name="download_widget" id="download_widget">
- <% fmt_stream.each do |option| %>
- <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
- <%= itag_to_metadata?(option["itag"]).try &.["height"]? || "~240" %>p - <%= option["mimeType"].as_s.split(";")[0] %>
- </option>
- <% end %>
- <% video_streams.each do |option| %>
- <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
- <%= option["qualityLabel"] %> - <%= option["mimeType"].as_s.split(";")[0] %> @ <%= option["fps"] %>fps - video only
- </option>
- <% end %>
- <% audio_streams.each do |option| %>
- <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
- <%= option["mimeType"].as_s.split(";")[0] %> @ <%= option["bitrate"]?.try &.as_i./ 1000 %>k - audio only
- </option>
- <% end %>
- <% captions.each do |caption| %>
- <option value='{"id":"<%= video.id %>","label":"<%= caption.name.simpleText %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= caption.languageCode %>.vtt"}'>
- <%= translate(locale, "Subtitles - `x` (.vtt)", caption.name.simpleText) %>
- </option>
- <% end %>
- </select>
- </div>
-
- <button type="submit" class="pure-button pure-button-primary">
- <b><%= translate(locale, "Download") %></b>
- </button>
- </form>
- <% end %>
+ <%= Invidious::Frontend::WatchPage.download_widget(locale, video, video_assets) %>
<p id="views"><i class="icon ion-ios-eye"></i> <%= number_with_separator(video.views) %></p>
<p id="likes"><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
- <p id="dislikes"><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
+ <p id="dislikes" style="display: none; visibility: hidden;"></p>
<p id="genre"><%= translate(locale, "Genre: ") %>
<% if !video.genre_url %>
<%= video.genre %>
@@ -198,12 +199,16 @@ we're going to need to do it here in order to allow for translations.
<% end %>
</p>
<% if video.license %>
- <p id="license"><%= translate(locale, "License: ") %><%= video.license %></p>
+ <% if video.license.empty? %>
+ <p id="license"><%= translate(locale, "License: ") %><%= translate(locale, "Standard YouTube license") %></p>
+ <% else %>
+ <p id="license"><%= translate(locale, "License: ") %><%= video.license %></p>
+ <% end %>
<% end %>
<p id="family_friendly"><%= translate(locale, "Family friendly? ") %><%= translate_bool(locale, video.is_family_friendly) %></p>
- <p id="wilson"><%= translate(locale, "Wilson score: ") %><%= video.wilson_score %></p>
- <p id="rating"><%= translate(locale, "Rating: ") %><%= video.average_rating %> / 5</p>
- <p id="engagement"><%= translate(locale, "Engagement: ") %><%= video.engagement %>%</p>
+ <p id="wilson" style="display: none; visibility: hidden;"></p>
+ <p id="rating" style="display: none; visibility: hidden;"></p>
+ <p id="engagement" style="display: none; visibility: hidden;"></p>
<% if video.allowed_regions.size != REGIONS.size %>
<p id="allowed_regions">
<% if video.allowed_regions.size < REGIONS.size // 2 %>
@@ -217,21 +222,28 @@ we're going to need to do it here in order to allow for translations.
</div>
<div class="pure-u-1 <% if params.related_videos || plid %>pure-u-lg-3-5<% else %>pure-u-md-4-5<% end %>">
- <div class="h-box">
- <a href="/channel/<%= video.ucid %>" style="display:block;width:fit-content;width:-moz-fit-content">
- <div class="channel-profile">
- <% if !video.author_thumbnail.empty? %>
- <img src="/ggpht<%= URI.parse(video.author_thumbnail).request_target %>">
- <% end %>
- <span id="channel-name"><%= video.author %></span>
- </div>
- </a>
- <% ucid = video.ucid %>
- <% author = video.author %>
- <% sub_count_text = video.sub_count_text %>
- <%= rendered "components/subscribe_widget" %>
+ <div class="pure-g h-box flexible title">
+ <div class="pure-u-1-2 flex-left flexible">
+ <a href="/channel/<%= video.ucid %>">
+ <div class="channel-profile">
+ <% if !video.author_thumbnail.empty? %>
+ <img src="/ggpht<%= URI.parse(video.author_thumbnail).request_target %>" alt="" />
+ <% end %>
+ <span id="channel-name"><%= author %><% if !video.author_verified.nil? && video.author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %></span>
+ </div>
+ </a>
+ </div>
+ <div class="pure-u-1-2 flex-right flexible button-container">
+ <div class="pure-u">
+ <% sub_count_text = video.sub_count_text %>
+ <%= rendered "components/subscribe_widget" %>
+ </div>
+ </div>
+ </div>
+
+ <div class="h-box">
<p id="published-date">
<% if video.premiere_timestamp.try &.> Time.utc %>
<b><%= video.premiere_timestamp.try { |t| translate(locale, "Premieres `x`", t.to_s("%B %-d, %R UTC")) } %></b>
@@ -242,21 +254,41 @@ we're going to need to do it here in order to allow for translations.
<div id="description-box"> <!-- Description -->
<% if video.description.size < 200 || params.extend_desc %>
- <%= video.description_html %>
+ <div id="descriptionWrapper"><%= video.description_html %></div>
<% else %>
<input id="descexpansionbutton" type="checkbox"/>
- <label for="descexpansionbutton" style="order: 1;">
+ <div id="descriptionWrapper"><%= video.description_html %></div>
+ <label for="descexpansionbutton">
<a></a>
</label>
- <div id="descriptionWrapper">
- <%= video.description_html %>
- </div>
<% end %>
</div>
<hr>
- <div id="comments">
+ <% if !video.music.empty? %>
+ <input id="music-desc-expansion" type="checkbox"/>
+ <label for="music-desc-expansion">
+ <h3 id="music-description-title">
+ <%= translate(locale, "Music in this video") %>
+ <span class="icon ion-ios-arrow-up"></span>
+ <span class="icon ion-ios-arrow-down"></span>
+ </h3>
+ </label>
+
+ <div id="music-description-box">
+ <% video.music.each do |music| %>
+ <div class="music-item">
+ <p class="music-song"><%= translate(locale, "Song: ") %><%= music.song %></p>
+ <p class="music-artist"><%= translate(locale, "Artist: ") %><%= music.artist %></p>
+ <p class="music-album"><%= translate(locale, "Album: ") %><%= music.album %></p>
+ </div>
+ <% end %>
+ </div>
+ <hr>
+
+ <% end %>
+ <div id="comments" class="comments">
<% if nojs %>
<%= comment_html %>
<% else %>
@@ -281,7 +313,7 @@ we're going to need to do it here in order to allow for translations.
<% if !video.related_videos.empty? %>
<div <% if plid %>style="display:none"<% end %>>
<div class="pure-control-group">
- <label for="continue"><%= translate(locale, "Play next by default: ") %></label>
+ <label for="continue"><%= translate(locale, "preferences_continue_label") %></label>
<input name="continue" id="continue" type="checkbox" <% if params.continue %>checked<% end %>>
</div>
<hr>
@@ -290,32 +322,47 @@ we're going to need to do it here in order to allow for translations.
<% video.related_videos.each do |rv| %>
<% if rv["id"]? %>
- <a href="/watch?v=<%= rv["id"] %>">
- <% if !env.get("preferences").as(Preferences).thin_mode %>
- <div class="thumbnail">
- <img class="thumbnail" src="/vi/<%= rv["id"] %>/mqdefault.jpg">
- <p class="length"><%= recode_length_seconds(rv["length_seconds"]?.try &.to_i? || 0) %></p>
- </div>
- <% end %>
- <p style="width:100%"><%= rv["title"] %></p>
- <h5 class="pure-g">
- <div class="pure-u-14-24">
- <% if rv["ucid"]? %>
- <b style="width:100%"><a href="/channel/<%= rv["ucid"] %>"><%= rv["author"]? %></a></b>
- <% else %>
- <b style="width:100%"><%= rv["author"]? %></b>
- <% end %>
- </div>
-
- <div class="pure-u-10-24" style="text-align:right">
- <% if views = rv["short_view_count_text"]?.try &.delete(", views watching") %>
- <% if !views.empty? %>
- <b class="width:100%"><%= translate(locale, "`x` views", views) %></b>
- <% end %>
- <% end %>
- </div>
- </h5>
- </a>
+ <div class="pure-u-1">
+
+ <div class="thumbnail">
+ <%- if !env.get("preferences").as(Preferences).thin_mode -%>
+ <a tabindex="-1" href="/watch?v=<%= rv["id"] %>&listen=<%= params.listen %>">
+ <img loading="lazy" class="thumbnail" src="/vi/<%= rv["id"] %>/mqdefault.jpg" alt="" />
+ </a>
+ <%- else -%>
+ <div class="thumbnail-placeholder"></div>
+ <%- end -%>
+
+ <div class="bottom-right-overlay">
+ <%- if (length_seconds = rv["length_seconds"]?.try &.to_i?) && length_seconds != 0 -%>
+ <p class="length"><%= recode_length_seconds(length_seconds) %></p>
+ <%- end -%>
+ </div>
+ </div>
+
+ <div class="video-card-row">
+ <a href="/watch?v=<%= rv["id"] %>&listen=<%= params.listen %>"><p dir="auto"><%= HTML.escape(rv["title"]) %></p></a>
+ </div>
+
+ <h5 class="pure-g">
+ <div class="pure-u-14-24">
+ <% if !rv["ucid"].empty? %>
+ <b style="width:100%"><a href="/channel/<%= rv["ucid"] %>"><%= rv["author"]? %><% if rv["author_verified"]? == "true" %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %></a></b>
+ <% else %>
+ <b style="width:100%"><%= rv["author"]? %><% if rv["author_verified"]? == "true" %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %></b>
+ <% end %>
+ </div>
+
+ <div class="pure-u-10-24" style="text-align:right">
+ <b class="width:100%"><%=
+ views = rv["view_count"]?.try &.to_i?
+ views ||= rv["view_count_short"]?.try { |x| short_text_to_number(x) }
+ translate_count(locale, "generic_views_count", views || 0, NumberFormatting::Short)
+ %></b>
+ </div>
+ </h5>
+
+ </div>
<% end %>
<% end %>
</div>
@@ -323,4 +370,5 @@ we're going to need to do it here in order to allow for translations.
</div>
<% end %>
</div>
+<script src="/js/comments.js?v=<%= ASSET_COMMIT %>"></script>
<script src="/js/watch.js?v=<%= ASSET_COMMIT %>"></script>
diff --git a/src/invidious/yt_backend/connection_pool.cr b/src/invidious/yt_backend/connection_pool.cr
new file mode 100644
index 00000000..c4a73aa7
--- /dev/null
+++ b/src/invidious/yt_backend/connection_pool.cr
@@ -0,0 +1,116 @@
+# Mapping of subdomain => YoutubeConnectionPool
+# This is needed as we may need to access arbitrary subdomains of ytimg
+private YTIMG_POOLS = {} of String => YoutubeConnectionPool
+
+struct YoutubeConnectionPool
+ property! url : URI
+ property! capacity : Int32
+ property! timeout : Float64
+ property pool : DB::Pool(HTTP::Client)
+
+ def initialize(url : URI, @capacity = 5, @timeout = 5.0)
+ @url = url
+ @pool = build_pool()
+ end
+
+ def client(&)
+ conn = pool.checkout
+ # Proxy needs to be reinstated every time we get a client from the pool
+ conn.proxy = make_configured_http_proxy_client() if CONFIG.http_proxy
+
+ begin
+ response = yield conn
+ rescue ex
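+ # The checked-out connection failed: discard it and retry once on a freshly
+ # built client; `ensure` below returns whichever client is current to the pool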
+ conn.close
+ conn = make_client(url, force_resolve: true)
+
+ response = yield conn
+ ensure
+ pool.release(conn)
+ end
+
+ response
+ end
+
+ private def build_pool
+ options = DB::Pool::Options.new(
+ initial_pool_size: 0,
+ max_pool_size: capacity,
+ max_idle_pool_size: capacity,
+ checkout_timeout: timeout
+ )
+
+ DB::Pool(HTTP::Client).new(options) do
+ next make_client(url, force_resolve: true)
+ end
+ end
+end
+
+def add_yt_headers(request)
+ request.headers.delete("User-Agent") if request.headers["User-Agent"] == "Crystal"
+ request.headers["User-Agent"] ||= "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
+
+ request.headers["Accept-Charset"] ||= "ISO-8859-1,utf-8;q=0.7,*;q=0.7"
+ request.headers["Accept"] ||= "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
+ request.headers["Accept-Language"] ||= "en-us,en;q=0.5"
+
+ # Preserve original cookies and add new YT consent cookie for EU servers
+ request.headers["Cookie"] = "#{request.headers["cookie"]?}; CONSENT=PENDING+#{Random.rand(100..999)}"
+ if !CONFIG.cookies.empty?
+ request.headers["Cookie"] = "#{(CONFIG.cookies.map { |c| "#{c.name}=#{c.value}" }).join("; ")}; #{request.headers["cookie"]?}"
+ end
+end
+
+def make_client(url : URI, region = nil, force_resolve : Bool = false, force_youtube_headers : Bool = false)
+ client = HTTP::Client.new(url)
+ client.proxy = make_configured_http_proxy_client() if CONFIG.http_proxy
+
+ # Force the usage of a specific configured IP Family
+ if force_resolve
+ client.family = CONFIG.force_resolve
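+ # Fall back to IPv4 when no specific family is configured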
+ client.family = Socket::Family::INET if client.family == Socket::Family::UNSPEC
+ end
+
+ client.before_request { |r| add_yt_headers(r) } if url.host.try &.ends_with?("youtube.com") || force_youtube_headers
+ client.read_timeout = 10.seconds
+ client.connect_timeout = 10.seconds
+
+ return client
+end
+
+def make_client(url : URI, region = nil, force_resolve : Bool = false, &)
+ client = make_client(url, region, force_resolve: force_resolve)
+ begin
+ yield client
+ ensure
+ client.close
+ end
+end
+
+def make_configured_http_proxy_client
+ # This method is only called when an HTTP proxy is configured
+ config_proxy = CONFIG.http_proxy.not_nil!
+
+ return HTTP::Proxy::Client.new(
+ config_proxy.host,
+ config_proxy.port,
+
+ username: config_proxy.user,
+ password: config_proxy.password,
+ )
+end
+
+# Fetches an HTTP pool for the specified subdomain of ytimg.com
+#
+# Creates a new one when the specified pool for the subdomain does not exist
+def get_ytimg_pool(subdomain)
+ if pool = YTIMG_POOLS[subdomain]?
+ return pool
+ else
+ LOGGER.info("ytimg_pool: Creating a new HTTP pool for \"https://#{subdomain}.ytimg.com\"")
+ pool = YoutubeConnectionPool.new(URI.parse("https://#{subdomain}.ytimg.com"), capacity: CONFIG.pool_size)
+ YTIMG_POOLS[subdomain] = pool
+
+ return pool
+ end
+end
diff --git a/src/invidious/yt_backend/extractors.cr b/src/invidious/yt_backend/extractors.cr
new file mode 100644
index 00000000..2631b62a
--- /dev/null
+++ b/src/invidious/yt_backend/extractors.cr
@@ -0,0 +1,1042 @@
+require "../helpers/serialized_yt_data"
+
+# This file contains helper methods to parse the YouTube API JSON data into
+# neat little packages we can use
+
+# Tuple of Parsers/Extractors so we can easily cycle through them.
+private ITEM_CONTAINER_EXTRACTOR = {
+ Extractors::YouTubeTabs,
+ Extractors::SearchResults,
+ Extractors::ContinuationContent,
+}
+
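+# Parsers for the individual item renderers. Each one's #process() method
+# returns nil when the given item is not the renderer type it handles.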
+private ITEM_PARSERS = {
+ Parsers::RichItemRendererParser,
+ Parsers::VideoRendererParser,
+ Parsers::ChannelRendererParser,
+ Parsers::GridPlaylistRendererParser,
+ Parsers::PlaylistRendererParser,
+ Parsers::CategoryRendererParser,
+ Parsers::ReelItemRendererParser,
+ Parsers::ItemSectionRendererParser,
+ Parsers::ContinuationItemRendererParser,
+ Parsers::HashtagRendererParser,
+ Parsers::LockupViewModelParser,
+}
+
+private alias InitialData = Hash(String, JSON::Any)
+
+record AuthorFallback, name : String, id : String
+
+# Namespace for logic relating to parsing InnerTube data into various datastructs.
+#
+# Each of the parsers in this namespace is accessed through its #process() method,
+# which checks whether the given data is applicable to it. If it is, the data is
+# passed to the private `#parse()` method, which returns a datastruct of the given
+# type. Otherwise, nil is returned.
+private module Parsers
+ # Parses an InnerTube videoRenderer into a SearchVideo. Returns nil when the given object isn't a videoRenderer
+ #
+ # A videoRenderer renders a video to click on within the YouTube and Invidious UI. It is **not**
+ # the watchable video itself.
+ #
+ # See specs for example.
+ #
+ # `videoRenderer`s can be found almost everywhere on YouTube: in categories, search results, channels, etc.
+ #
+ module VideoRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = (item["videoRenderer"]? || item["gridVideoRenderer"]?)
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ video_id = item_contents["videoId"].as_s
+ title = extract_text(item_contents["title"]?) || ""
+
+ # Extract author information
+ if author_info = item_contents.dig?("ownerText", "runs", 0)
+ author = author_info["text"].as_s
+ author_id = HelperExtractors.get_browse_id(author_info)
+ elsif author_info = item_contents.dig?("shortBylineText", "runs", 0)
+ author = author_info["text"].as_s
+ author_id = HelperExtractors.get_browse_id(author_info)
+ else
+ author = author_fallback.name
+ author_id = author_fallback.id
+ end
+
+ author_verified = has_verified_badge?(item_contents["ownerBadges"]?)
+
+ # For live videos (and possibly recently premiered videos) there is no published information.
+ # Instead, in its place is the number of people currently watching. This behavior should be replicated
+ # on Invidious once all features of livestreams are supported. On an unrelated note, defaulting to the current
+ # time for publishing isn't a good idea.
+ published = item_contents.dig?("publishedTimeText", "simpleText").try { |t| decode_date(t.as_s) } || Time.local
+
+ # Typically views are stored under a "simpleText" in the "viewCountText". However, for
+ # livestreams and premieres it is stored under a "runs" array: [{"text":123}, {"text": "watching"}]
+ # When view count is disabled the "viewCountText" is not present on InnerTube data.
+ # TODO change default value to nil and typical encoding type to tuple storing type (watchers, views, etc)
+ # and count
+ view_count = item_contents.dig?("viewCountText", "simpleText").try &.as_s.gsub(/\D+/, "").to_i64? || 0_i64
+ description_html = item_contents["descriptionSnippet"]?.try { |t| parse_content(t, video_id) } || ""
+
+ # The length information generally exists in "lengthText". However, the info can sometimes
+ # be retrieved from "thumbnailOverlays" (e.g when the video is a "shorts" one).
+ if length_container = item_contents["lengthText"]?
+ length_seconds = decode_length_seconds(length_container["simpleText"].as_s)
+ elsif length_container = item_contents["thumbnailOverlays"]?.try &.as_a.find(&.["thumbnailOverlayTimeStatusRenderer"]?)
+ # This needs to only go down the `simpleText` path (if possible). If more situations come up that require
+ # a specific pathway, then we should add an argument to extract_text to make this possible
+ length_text = length_container.dig?("thumbnailOverlayTimeStatusRenderer", "text", "simpleText")
+
+ if length_text
+ length_text = length_text.as_s
+
+ if length_text == "SHORTS"
+ # Approximate length to one minute, as "shorts" generally don't exceed that length.
+ # TODO: Add some sort of metadata for the type of video (normal, live, premiere, shorts)
+ length_seconds = 60_i32
+ else
+ length_seconds = decode_length_seconds(length_text)
+ end
+ else
+ length_seconds = 0
+ end
+ else
+ length_seconds = 0
+ end
+
+ premiere_timestamp = item_contents.dig?("upcomingEventData", "startTime").try { |t| Time.unix(t.as_s.to_i64) }
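+ # Collect the badge flags (LIVE, New, 4K, CC, ...) attached to the renderer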
+ badges = VideoBadges::None
+ item_contents["badges"]?.try &.as_a.each do |badge|
+ b = badge["metadataBadgeRenderer"]
+ case b["label"].as_s
+ when "LIVE"
+ badges |= VideoBadges::LiveNow
+ when "New"
+ badges |= VideoBadges::New
+ when "4K"
+ badges |= VideoBadges::FourK
+ when "8K"
+ badges |= VideoBadges::EightK
+ when "VR180"
+ badges |= VideoBadges::VR180
+ when "360°"
+ badges |= VideoBadges::VR360
+ when "3D"
+ badges |= VideoBadges::ThreeD
+ when "CC"
+ badges |= VideoBadges::ClosedCaptions
+ when "Premium"
+ # TODO: Potentially available as item_contents["topStandaloneBadge"]["metadataBadgeRenderer"]
+ badges |= VideoBadges::Premium
+ else nil # Ignore
+ end
+ end
+
+ SearchVideo.new({
+ title: title,
+ id: video_id,
+ author: author,
+ ucid: author_id,
+ published: published,
+ views: view_count,
+ description_html: description_html,
+ length_seconds: length_seconds,
+ premiere_timestamp: premiere_timestamp,
+ author_verified: author_verified,
+ badges: badges,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses a InnerTube channelRenderer into a SearchChannel. Returns nil when the given object isn't a channelRenderer
+ #
+ # A channelRenderer renders a channel to click on within the YouTube and Invidious UI. It is **not**
+ # the channel page itself.
+ #
+ # See specs for example.
+ #
+ # `channelRenderer`s can be found almost everywhere on YouTube. In categories, search results, channels, etc.
+ #
+ module ChannelRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = (item["channelRenderer"]? || item["gridChannelRenderer"]?)
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ author = extract_text(item_contents["title"]) || author_fallback.name
+ author_id = item_contents["channelId"]?.try &.as_s || author_fallback.id
+ author_verified = has_verified_badge?(item_contents["ownerBadges"]?)
+ author_thumbnail = HelperExtractors.get_thumbnails(item_contents)
+
+ # When public subscriber count is disabled, the subscriberCountText isn't sent by InnerTube.
+ # Always simpleText
+ # TODO change default value to nil
+
+ subscriber_count = item_contents.dig?("subscriberCountText", "simpleText").try &.as_s
+ channel_handle = subscriber_count if (subscriber_count.try &.starts_with? "@")
+
+ # Since YouTube added channel handles, `videoCountText` holds the number of
+ # subscribers and `subscriberCountText` holds the handle, except when the
+ # channel doesn't have a handle (e.g: some topic music channels).
+ # See https://github.com/iv-org/invidious/issues/3394#issuecomment-1321261688
+ if !subscriber_count || !subscriber_count.includes? " subscriber"
+ subscriber_count = item_contents.dig?("videoCountText", "simpleText").try &.as_s
+ end
+ subscriber_count = subscriber_count
+ .try { |s| short_text_to_number(s.split(" ")[0]).to_i32 } || 0
+
+ # Auto-generated channels don't have a videoCountText
+ # Taken from: https://github.com/iv-org/invidious/pull/2228#discussion_r717620922
+ auto_generated = item_contents["videoCountText"]?.nil?
+
+ video_count = HelperExtractors.get_video_count(item_contents)
+ description_html = item_contents["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
+
+ SearchChannel.new({
+ author: author,
+ ucid: author_id,
+ author_thumbnail: author_thumbnail,
+ subscriber_count: subscriber_count,
+ video_count: video_count,
+ channel_handle: channel_handle,
+ description_html: description_html,
+ auto_generated: auto_generated,
+ author_verified: author_verified,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an Innertube `hashtagTileRenderer` into a `SearchHashtag`.
+ # Returns `nil` when the given object is not a `hashtagTileRenderer`.
+ #
+ # A `hashtagTileRenderer` is a kind of search result.
+ # It can be found when searching for any hashtag (e.g "#hi" or "#shorts")
+ module HashtagRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["hashtagTileRenderer"]?
+ return self.parse(item_contents)
+ end
+ end
+
+ private def self.parse(item_contents)
+ title = extract_text(item_contents["hashtag"]).not_nil! # E.g "#hi"
+
+ # E.g "/hashtag/hi"
+ url = item_contents.dig?("onTapCommand", "commandMetadata", "webCommandMetadata", "url").try &.as_s
+ url ||= URI.encode_path("/hashtag/#{title.lchop('#')}")
+
+ video_count_txt = extract_text(item_contents["hashtagVideoCount"]?) # E.g "203K videos"
+ channel_count_txt = extract_text(item_contents["hashtagChannelCount"]?) # E.g "81K channels"
+
+ # Fallback for video/channel counts
+ if channel_count_txt.nil? || video_count_txt.nil?
+ # E.g: "203K videos • 81K channels"
+ info_text = extract_text(item_contents["hashtagInfoText"]?).try &.split(" • ")
+
+ if info_text && info_text.size == 2
+ video_count_txt ||= info_text[0]
+ channel_count_txt ||= info_text[1]
+ end
+ end
+
+ return SearchHashtag.new({
+ title: title,
+ url: url,
+ video_count: short_text_to_number(video_count_txt || ""),
+ channel_count: short_text_to_number(channel_count_txt || ""),
+ })
+ rescue ex
+ LOGGER.debug("HashtagRendererParser: Failed to extract renderer.")
+ LOGGER.debug("HashtagRendererParser: Got exception: #{ex.message}")
+ return nil
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses a InnerTube gridPlaylistRenderer into a SearchPlaylist. Returns nil when the given object isn't a gridPlaylistRenderer
+ #
+ # A gridPlaylistRenderer renders a playlist, that is located in a grid, to click on within the YouTube and Invidious UI.
+ # It is **not** the playlist itself.
+ #
+ # See specs for example.
+ #
+ # `gridPlaylistRenderer`s can be found on the playlist-tabs of channels and expanded categories.
+ #
+ module GridPlaylistRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["gridPlaylistRenderer"]?
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ title = extract_text(item_contents["title"]) || ""
+ plid = item_contents["playlistId"]?.try &.as_s || ""
+
+ author_verified = has_verified_badge?(item_contents["ownerBadges"]?)
+
+ video_count = HelperExtractors.get_video_count(item_contents)
+ playlist_thumbnail = HelperExtractors.get_thumbnails(item_contents)
+
+ SearchPlaylist.new({
+ title: title,
+ id: plid,
+ author: author_fallback.name,
+ ucid: author_fallback.id,
+ video_count: video_count,
+ videos: [] of SearchPlaylistVideo,
+ thumbnail: playlist_thumbnail,
+ author_verified: author_verified,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses a InnerTube playlistRenderer into a SearchPlaylist. Returns nil when the given object isn't a playlistRenderer
+ #
+ # A playlistRenderer renders a playlist to click on within the YouTube and Invidious UI. It is **not** the playlist itself.
+ #
+ # See specs for example.
+ #
+ # `playlistRenderer`s can be found almost everywhere on YouTube. In categories, search results, recommended, etc.
+ #
+ module PlaylistRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["playlistRenderer"]?
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ title = extract_text(item_contents["title"]) || ""
+ plid = item_contents["playlistId"]?.try &.as_s || ""
+
+ video_count = HelperExtractors.get_video_count(item_contents)
+ playlist_thumbnail = HelperExtractors.get_thumbnails_plural(item_contents)
+
+ author_info = item_contents.dig?("shortBylineText", "runs", 0)
+ author = author_info.try &.["text"].as_s || author_fallback.name
+ author_id = author_info.try { |x| HelperExtractors.get_browse_id(x) } || author_fallback.id
+ author_verified = has_verified_badge?(item_contents["ownerBadges"]?)
+
+ videos = item_contents["videos"]?.try &.as_a.map do |v|
+ v = v["childVideoRenderer"]
+ v_title = v.dig?("title", "simpleText").try &.as_s || ""
+ v_id = v["videoId"]?.try &.as_s || ""
+ v_length_seconds = v.dig?("lengthText", "simpleText").try { |t| decode_length_seconds(t.as_s) } || 0
+ SearchPlaylistVideo.new({
+ title: v_title,
+ id: v_id,
+ length_seconds: v_length_seconds,
+ })
+ end || [] of SearchPlaylistVideo
+
+ # TODO: item_contents["publishedTimeText"]?
+
+ SearchPlaylist.new({
+ title: title,
+ id: plid,
+ author: author,
+ ucid: author_id,
+ video_count: video_count,
+ videos: videos,
+ thumbnail: playlist_thumbnail,
+ author_verified: author_verified,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses a InnerTube shelfRenderer into a Category. Returns nil when the given object isn't a shelfRenderer
+ #
+ # A shelfRenderer renders divided sections on YouTube, e.g. "People also watched" in search results and
+ # the various organizational sections on the channel home page. A separate one (richShelfRenderer) is used
+ # for the YouTube home page. A shelfRenderer can also sometimes be expanded to show more content within it.
+ #
+ # See specs for example.
+ #
+ # `shelfRenderer`s can be found almost everywhere on YouTube. In categories, search results, channels, etc.
+ #
+ module CategoryRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["shelfRenderer"]?
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ title = extract_text(item_contents["title"]?) || ""
+ url = item_contents.dig?("endpoint", "commandMetadata", "webCommandMetadata", "url")
+ .try &.as_s
+
+ # Sometimes a category can have badges.
+ badges = [] of Tuple(String, String) # (Badge style, label)
+ item_contents["badges"]?.try &.as_a.each do |badge|
+ badge = badge["metadataBadgeRenderer"]
+ badges << {badge["style"].as_s, badge["label"].as_s}
+ end
+
+ # Category description
+ description_html = item_contents["subtitle"]?.try { |desc| parse_content(desc) } || ""
+
+ # Content parsing
+ contents = [] of SearchItem
+
+ # InnerTube recognizes some "special" categories, which are organized differently.
+ if special_category_container = item_contents["content"]?
+ if content_container = special_category_container["horizontalListRenderer"]?
+ elsif content_container = special_category_container["expandedShelfContentsRenderer"]?
+ elsif content_container = special_category_container["verticalListRenderer"]?
+ else
+ # Anything else, such as `horizontalMovieListRenderer` is currently unsupported.
+ return
+ end
+ else
+ # "Normal" category.
+ content_container = item_contents["contents"]
+ end
+
+ content_container["items"]?.try &.as_a.each do |item|
+ result = parse_item(item, author_fallback.name, author_fallback.id)
+ contents << result if result.is_a?(SearchItem)
+ end
+
+ Category.new({
+ title: title,
+ contents: contents,
+ description_html: description_html,
+ url: url,
+ badges: badges,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an InnerTube itemSectionRenderer into a SearchVideo.
+ # Returns nil when the given object isn't an itemSectionRenderer
+ #
+ # An itemSectionRenderer seems to be a simple wrapper for a videoRenderer or a playlistRenderer, used
+ # by the result page for channel searches. It is located inside a continuationItems
+ # container. It is very similar to RichItemRendererParser.
+ #
+ module ItemSectionRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item.dig?("itemSectionRenderer", "contents", 0)
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ child = VideoRendererParser.process(item_contents, author_fallback)
+ child ||= PlaylistRendererParser.process(item_contents, author_fallback)
+
+ return child
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an InnerTube richItemRenderer into a SearchVideo.
+ # Returns nil when the given object isn't a richItemRenderer
+ #
+ # A richItemRenderer seems to be a simple wrapper around various other types,
+ # used on the hashtag results page and the channel podcast tab. It is itself
+ # located inside a richGridRenderer container.
+ #
+ module RichItemRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item.dig?("richItemRenderer", "content")
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ child = VideoRendererParser.process(item_contents, author_fallback)
+ child ||= ReelItemRendererParser.process(item_contents, author_fallback)
+ child ||= PlaylistRendererParser.process(item_contents, author_fallback)
+ child ||= LockupViewModelParser.process(item_contents, author_fallback)
+ child ||= ShortsLockupViewModelParser.process(item_contents, author_fallback)
+ return child
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an InnerTube reelItemRenderer into a SearchVideo.
+ # Returns nil when the given object isn't a reelItemRenderer
+ #
+ # reelItemRenderer items are used in the new (2022) channel layout,
+ # in the "shorts" tab.
+ #
+ # NOTE: As of 10/2024, it might have been fully replaced by shortsLockupViewModel
+ # TODO: Confirm that hypothesis
+ #
+ module ReelItemRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["reelItemRenderer"]?
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ video_id = item_contents["videoId"].as_s
+
+ reel_player_overlay = item_contents.dig(
+ "navigationEndpoint", "reelWatchEndpoint",
+ "overlay", "reelPlayerOverlayRenderer"
+ )
+
+ if video_details_container = reel_player_overlay.dig?(
+ "reelPlayerHeaderSupportedRenderers",
+ "reelPlayerHeaderRenderer"
+ )
+ # Author infos
+
+ author = video_details_container
+ .dig?("channelTitleText", "runs", 0, "text")
+ .try &.as_s || author_fallback.name
+
+ ucid = video_details_container
+ .dig?("channelNavigationEndpoint", "browseEndpoint", "browseId")
+ .try &.as_s || author_fallback.id
+
+ # Title & publication date
+
+ title = video_details_container.dig?("reelTitleText")
+ .try { |t| extract_text(t) } || ""
+
+ published = video_details_container
+ .dig?("timestampText", "simpleText")
+ .try { |t| decode_date(t.as_s) } || Time.utc
+
+ # View count
+ view_count_text = video_details_container.dig?("viewCountText", "simpleText")
+ else
+ author = author_fallback.name
+ ucid = author_fallback.id
+ published = Time.utc
+ title = item_contents.dig?("headline", "simpleText").try &.as_s || ""
+ end
+ # View count
+
+ # View count used to be in the reelWatchEndpoint, but that changed?
+ view_count_text ||= item_contents.dig?("viewCountText", "simpleText")
+
+ view_count = short_text_to_number(view_count_text.try &.as_s || "0")
+
+ # Duration
+
+ a11y_data = item_contents
+ .dig?("accessibility", "accessibilityData", "label")
+ .try &.as_s || ""
+
+ regex_match = /- (?<min>\d+ minutes? )?(?<sec>\d+ seconds?)+ -/.match(a11y_data)
+
+ minutes = regex_match.try &.["min"]?.try &.to_i(strict: false) || 0
+ seconds = regex_match.try &.["sec"]?.try &.to_i(strict: false) || 0
+
+ duration = (minutes*60 + seconds)
+
+ SearchVideo.new({
+ title: title,
+ id: video_id,
+ author: author,
+ ucid: ucid,
+ published: published,
+ views: view_count,
+ description_html: "",
+ length_seconds: duration,
+ premiere_timestamp: Time.unix(0),
+ author_verified: false,
+ badges: VideoBadges::None,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an InnerTube lockupViewModel into a SearchPlaylist.
+ # Returns nil when the given object is not a lockupViewModel.
+ #
+ # This structure has been present since November 2024 on the "podcasts" and
+ # "playlists" tabs of the channel page. It is usually encapsulated in either
+ # a richItemRenderer or a richGridRenderer.
+ #
+ module LockupViewModelParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["lockupViewModel"]?
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ playlist_id = item_contents["contentId"].as_s
+
+ thumbnail_view_model = item_contents.dig(
+ "contentImage", "collectionThumbnailViewModel",
+ "primaryThumbnail", "thumbnailViewModel"
+ )
+
+ thumbnail = thumbnail_view_model.dig("image", "sources", 0, "url").as_s
+
+ # This complicated sequence tries to extract the following data structure:
+ # "overlays": [{
+ # "thumbnailOverlayBadgeViewModel": {
+ # "thumbnailBadges": [{
+ # "thumbnailBadgeViewModel": {
+ # "text": "430 episodes",
+ # "badgeStyle": "THUMBNAIL_OVERLAY_BADGE_STYLE_DEFAULT"
+ # }
+ # }]
+ # }
+ # }]
+ #
+ # NOTE: this simplistic `.to_i` conversion might not work on larger
+ # playlists and hasn't been tested.
+ video_count = thumbnail_view_model.dig("overlays").as_a
+ .compact_map(&.dig?("thumbnailOverlayBadgeViewModel", "thumbnailBadges").try &.as_a)
+ .flatten
+ .find(nil, &.dig?("thumbnailBadgeViewModel", "text").try { |node|
+ {"episodes", "videos"}.any? { |str| node.as_s.ends_with?(str) }
+ })
+ .try &.dig("thumbnailBadgeViewModel", "text").as_s.to_i(strict: false)
+
+ metadata = item_contents.dig("metadata", "lockupMetadataViewModel")
+ title = metadata.dig("title", "content").as_s
+
+ # TODO: Retrieve "updated" info from metadata parts
+ # rows = metadata.dig("metadata", "contentMetadataViewModel", "metadataRows").as_a
+ # parts_text = rows.map(&.dig?("metadataParts", "text", "content").try &.as_s)
+ # One of these parts should contain a string like: "Updated 2 days ago"
+
+ # TODO: Maybe add a button to access the first video of the playlist?
+ # item_contents.dig("rendererContext", "commandContext", "onTap", "innertubeCommand", "watchEndpoint")
+ # Available fields: "videoId", "playlistId", "params"
+
+ return SearchPlaylist.new({
+ title: title,
+ id: playlist_id,
+ author: author_fallback.name,
+ ucid: author_fallback.id,
+ video_count: video_count || -1,
+ videos: [] of SearchPlaylistVideo,
+ thumbnail: thumbnail,
+ author_verified: false,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an InnerTube shortsLockupViewModel into a SearchVideo.
+ # Returns nil when the given object is not a shortsLockupViewModel.
+ #
+ # This structure has been present since around October 2024 on the "shorts" tab of
+ # the channel page and likely replaces the reelItemRenderer structure. It is
+ # usually (always?) encapsulated in a richItemRenderer.
+ #
+ module ShortsLockupViewModelParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["shortsLockupViewModel"]?
+ return self.parse(item_contents, author_fallback)
+ end
+ end
+
+ private def self.parse(item_contents, author_fallback)
+ # TODO: Maybe add support for "oardefault.jpg" thumbnails?
+ # thumbnail = item_contents.dig("thumbnail", "sources", 0, "url").as_s
+ # Gives: https://i.ytimg.com/vi/{video_id}/oardefault.jpg?...
+
+ video_id = item_contents.dig(
+ "onTap", "innertubeCommand", "reelWatchEndpoint", "videoId"
+ ).as_s
+
+ title = item_contents.dig("overlayMetadata", "primaryText", "content").as_s
+
+ view_count = short_text_to_number(
+ item_contents.dig("overlayMetadata", "secondaryText", "content").as_s
+ )
+
+ # Approximate to one minute, as "shorts" generally don't exceed that.
+ # NOTE: The actual duration is not provided by Youtube anymore.
+ # TODO: Maybe use -1 as an error value and handle that on the frontend?
+ duration = 60_i32
+
+ SearchVideo.new({
+ title: title,
+ id: video_id,
+ author: author_fallback.name,
+ ucid: author_fallback.id,
+ published: Time.unix(0),
+ views: view_count,
+ description_html: "",
+ length_seconds: duration,
+ premiere_timestamp: Time.unix(0),
+ author_verified: false,
+ badges: VideoBadges::None,
+ })
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+
+ # Parses an InnerTube continuationItemRenderer into a Continuation.
+ # Returns nil when the given object isn't a continuationItemRenderer.
+ #
+ # continuationItemRenderer contains various metadata used to load more
+ # content (i.e. when the user scrolls down). The interesting bit is the
+ # protobuf object known as the "continuation token". Previously, those
+ # were generated from scratch, but recent (as of 11/2022) YouTube changes
+ # are forcing us to extract them from the API responses.
+ #
+ module ContinuationItemRendererParser
+ def self.process(item : JSON::Any, author_fallback : AuthorFallback)
+ if item_contents = item["continuationItemRenderer"]?
+ return self.parse(item_contents)
+ end
+ end
+
+ private def self.parse(item_contents)
+ token = item_contents
+ .dig?("continuationEndpoint", "continuationCommand", "token")
+ .try &.as_s
+
+ return Continuation.new(token) if token
+ end
+
+ def self.parser_name
+ return {{@type.name}}
+ end
+ end
+end
+
+# The following are the extractors for extracting an array of items from
+# the internal YouTube API's JSON response. The result is then packaged into
+# a structure we can more easily use via the parsers above. Their internal
+# layout is identical to that of the item parsers.
+
+# Namespace for logic relating to extracting InnerTube's initial response to items we can parse.
+#
+# Each of the extractors in this namespace is accessed through the #process() method,
+# which validates the given data as applicable to itself. If it is applicable, the given
+# data is passed to the private `#extract()` method, which returns an array of
+# parsable items. Otherwise, nil is returned.
+#
+# NOTE perhaps the result from here should be abstracted into a struct in order to
+# get additional metadata regarding the container of the item(s).
+private module Extractors
+ # Extracts items from the selected YouTube tab.
+ #
+ # YouTube tabs are typically stored under "twoColumnBrowseResultsRenderer"
+ # and are structured like this:
+ #
+ # "twoColumnBrowseResultsRenderer": {
+ #   "tabs": [
+ #     {"tabRenderer": {
+ #       "endpoint": {...},
+ #       "title": "Playlists",
+ #       "selected": true, # Is nil unless tab is selected
+ #       "content": {...},
+ #       ...
+ #     }}
+ #   ]
+ # }
+ #
+ module YouTubeTabs
+ def self.process(initial_data : InitialData)
+ if target = initial_data["twoColumnBrowseResultsRenderer"]?
+ self.extract(target)
+ end
+ end
+
+ private def self.extract(target)
+ raw_items = [] of JSON::Any
+ content = extract_selected_tab(target["tabs"])["content"]
+
+ if section_list_contents = content.dig?("sectionListRenderer", "contents")
+ raw_items = unpack_section_list(section_list_contents)
+ elsif rich_grid_contents = content.dig?("richGridRenderer", "contents")
+ raw_items = rich_grid_contents.as_a
+ end
+
+ return raw_items
+ end
+
+ private def self.unpack_section_list(contents)
+ raw_items = [] of JSON::Any
+
+ contents.as_a.each do |item|
+ if item_section_content = item.dig?("itemSectionRenderer", "contents")
+ raw_items += self.unpack_item_section(item_section_content)
+ else
+ raw_items << item
+ end
+ end
+
+ return raw_items
+ end
+
+ private def self.unpack_item_section(contents)
+ raw_items = [] of JSON::Any
+
+ contents.as_a.each do |item|
+ # Category extraction
+ if container = item.dig?("gridRenderer", "items") || item.dig?("items")
+ raw_items += container.as_a
+ else
+ raw_items << item
+ end
+ end
+
+ return raw_items
+ end
+
+ def self.extractor_name
+ return {{@type.name}}
+ end
+ end
+
+ # Extracts items from the InnerTube response for search results
+ #
+ # Search results are typically stored under "twoColumnSearchResultsRenderer"
+ # and are structured like this:
+ #
+ # "twoColumnSearchResultsRenderer": {
+ #   "primaryContents": {
+ #     "sectionListRenderer": {
+ #       "contents": [...],
+ #       ...,
+ #       "subMenu": {...},
+ #       "hideBottomSeparator": true,
+ #       "targetId": "search-feed"
+ #     }
+ #   }
+ # }
+ #
+ module SearchResults
+ def self.process(initial_data : InitialData)
+ if target = initial_data["twoColumnSearchResultsRenderer"]?
+ self.extract(target)
+ end
+ end
+
+ private def self.extract(target)
+ raw_items = [] of Array(JSON::Any)
+
+ target.dig("primaryContents", "sectionListRenderer", "contents").as_a.each do |node|
+ if node = node["itemSectionRenderer"]?
+ raw_items << node["contents"].as_a
+ end
+ end
+
+ return raw_items.flatten
+ end
+
+ def self.extractor_name
+ return {{@type.name}}
+ end
+ end
+
+ # Extracts continuation items from an InnerTube response
+ #
+ # Continuation items (on YouTube) are items which are appended to the
+ # end of the page for continuous scrolling. As such, in many cases,
+ # the items lack information such as the author or category title,
+ # since the original results have already rendered them at the top of the page.
+ #
+ # The way they are structured is too varied to be accurately written down here.
+ # However, they all eventually lead to an array of parsable items after traversing
+ # through the JSON structure.
+ module ContinuationContent
+ def self.process(initial_data : InitialData)
+ if target = initial_data["continuationContents"]?
+ self.extract(target)
+ elsif target = initial_data["appendContinuationItemsAction"]?
+ self.extract(target)
+ elsif target = initial_data["reloadContinuationItemsCommand"]?
+ self.extract(target)
+ end
+ end
+
+ private def self.extract(target)
+ content = target["continuationItems"]?
+ content ||= target.dig?("gridContinuation", "items")
+ content ||= target.dig?("richGridContinuation", "contents")
+
+ return content.nil? ? [] of JSON::Any : content.as_a
+ end
+
+ def self.extractor_name
+ return {{@type.name}}
+ end
+ end
+end
+
+# Helper methods to aid in the parsing of InnerTube to data structs.
+#
+# Mostly used to extract out repeated structures to deal with code
+# repetition.
+module HelperExtractors
+ # Retrieves the number of videos present within the given InnerTube data.
+ #
+ # Returns 0 when it's unable to do so.
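+ #
+ # Illustrative sketch (hypothetical, trimmed-down InnerTube data):
+ #
+ # ```
+ # HelperExtractors.get_video_count(JSON.parse(%({"videoCount": "15"})))                              # => 15
+ # HelperExtractors.get_video_count(JSON.parse(%({"videoCountText": {"simpleText": "42 videos"}})))   # => 42
+ # HelperExtractors.get_video_count(JSON.parse(%({})))                                                # => 0
+ # ```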
+ def self.get_video_count(container : JSON::Any) : Int32
+ if box = container["videoCountText"]?
+ if (extracted_text = extract_text(box)) && !extracted_text.includes? " subscriber"
+ return extracted_text.gsub(/\D/, "").to_i
+ else
+ return 0
+ end
+ elsif box = container["videoCount"]?
+ return box.as_s.to_i
+ else
+ return 0
+ end
+ end
+
+ # Retrieves the number of views/viewers a video has.
+ # Seems to be used on related videos only.
+ #
+ # Returns "0" when unable to parse.
+ def self.get_short_view_count(container : JSON::Any) : String
+ box = container["shortViewCountText"]?
+ return "0" if !box
+
+ # simpleText: "4M views"
+ # runs: [{"text": "1.1K"}, {"text": " watching"}]
+ return box["simpleText"]?.try &.as_s.sub(" views", "") ||
+ box.dig?("runs", 0, "text").try &.as_s || "0"
+ end
+
+ # Retrieve lowest quality thumbnail from InnerTube data
+ #
+ # TODO allow configuration of image quality (-1 is highest)
+ #
+ # Raises when it's unable to parse from the given JSON data.
+ def self.get_thumbnails(container : JSON::Any) : String
+ return container.dig("thumbnail", "thumbnails", 0, "url").as_s
+ end
+
+ # ditto
+ #
+ # YouTube sometimes sends the thumbnail as:
+ # {"thumbnails": [{"thumbnails": [{"url": "example.com"}, ...]}]}
+ def self.get_thumbnails_plural(container : JSON::Any) : String
+ return container.dig("thumbnails", 0, "thumbnails", 0, "url").as_s
+ end
+
+ # Retrieves the ID required for querying the InnerTube browse endpoint.
+ # Returns an empty string when it's unable to do so
+ def self.get_browse_id(container)
+ return container.dig?("navigationEndpoint", "browseEndpoint", "browseId").try &.as_s || ""
+ end
+end
+
+# Parses an item from Youtube's JSON response into a more usable structure.
+# The end result can either be a SearchVideo, SearchPlaylist or SearchChannel.
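+#
+# A minimal usage sketch (illustrative only; the inline JSON is a hypothetical,
+# heavily trimmed videoRenderer, not a full InnerTube response):
+#
+# ```
+# item = JSON.parse(%({"videoRenderer": {"videoId": "dQw4w9WgXcQ", "title": {"runs": [{"text": "Example"}]}}}))
+# result = parse_item(item, "Fallback author", "UC_fallback_id")
+# result.is_a?(SearchVideo) # => true (author/ucid filled in from the fallbacks)
+# ```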
+def parse_item(item : JSON::Any, author_fallback : String? = "", author_id_fallback : String? = "")
+ # We "allow" nil values but secretly use empty strings instead. This is to save us the
+ # hassle of modifying every author_fallback and author_id_fallback arg usage
+ # which is more often than not nil.
+ author_fallback = AuthorFallback.new(author_fallback || "", author_id_fallback || "")
+
+ # Cycles through all of the item parsers and attempts to parse the raw YT JSON data.
+ # Each parser automatically validates the given data to see if it is
+ # applicable to itself. If not, nil is returned and the next parser is attempted.
+ ITEM_PARSERS.each do |parser|
+ LOGGER.trace("parse_item: Attempting to parse item using \"#{parser.parser_name}\" (cycling...)")
+
+ if result = parser.process(item, author_fallback)
+ LOGGER.debug("parse_item: Successfully parsed via #{parser.parser_name}")
+ return result
+ else
+ LOGGER.trace("parse_item: Parser \"#{parser.parser_name}\" does not apply. Cycling to the next one...")
+ end
+ end
+end
+
+# Parses multiple items from YouTube's initial JSON response into a more usable structure.
+# The end result is an array of SearchItem.
+#
+# This function yields the container so that items can be parsed separately.
+#
+def extract_items(initial_data : InitialData, &)
+ if unpackaged_data = initial_data["contents"]?.try &.as_h
+ elsif unpackaged_data = initial_data["response"]?.try &.as_h
+ elsif unpackaged_data = initial_data.dig?("onResponseReceivedActions", 1).try &.as_h
+ elsif unpackaged_data = initial_data.dig?("onResponseReceivedActions", 0).try &.as_h
+ else
+ unpackaged_data = initial_data
+ end
+
+ # This is identical to the parser cycling of parse_item().
+ ITEM_CONTAINER_EXTRACTOR.each do |extractor|
+ LOGGER.trace("extract_items: Attempting to extract item container using \"#{extractor.extractor_name}\" (cycling...)")
+
+ if container = extractor.process(unpackaged_data)
+ LOGGER.debug("extract_items: Successfully unpacked container with \"#{extractor.extractor_name}\"")
+ # Extract items in container
+ container.each { |item| yield item }
+ else
+ LOGGER.trace("extract_items: Extractor \"#{extractor.extractor_name}\" does not apply. Cycling to the next one...")
+ end
+ end
+end
+
+# Wrapper using the block function above
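+#
+# A minimal usage sketch, assuming `initial_data` is the Hash returned by one of
+# the YoutubeAPI request helpers; `ctoken` and the fallback name/ID are illustrative:
+#
+# ```
+# initial_data = YoutubeAPI.browse(ctoken) # `ctoken` is a previously obtained continuation token
+# items, continuation = extract_items(initial_data, "Channel name", "UC_example_id")
+# items.each { |item| puts item.title if item.is_a?(SearchVideo) }
+# ```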
+def extract_items(
+ initial_data : InitialData,
+ author_fallback : String? = nil,
+ author_id_fallback : String? = nil
+) : {Array(SearchItem), String?}
+ items = [] of SearchItem
+ continuation = nil
+
+ extract_items(initial_data) do |item|
+ parsed = parse_item(item, author_fallback, author_id_fallback)
+
+ case parsed
+ when .is_a?(Continuation) then continuation = parsed.token
+ when .is_a?(SearchItem) then items << parsed
+ end
+ end
+
+ return items, continuation
+end
diff --git a/src/invidious/yt_backend/extractors_utils.cr b/src/invidious/yt_backend/extractors_utils.cr
new file mode 100644
index 00000000..c83a2de5
--- /dev/null
+++ b/src/invidious/yt_backend/extractors_utils.cr
@@ -0,0 +1,87 @@
+# Extracts text from InnerTube response
+#
+# InnerTube can package text in three different formats
+# "runs": [
+# {"text": "something"},
+# {"text": "cont"},
+# ...
+# ]
+#
+# "SimpleText": "something"
+#
+# Or sometimes just none at all as with the data returned from
+# category continuations.
+#
+# In order to facilitate calling this function with `#[]?`:
+# A nil will be accepted. Of course, since nil cannot be parsed,
+# another nil will be returned.
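+#
+# Illustrative examples (hypothetical snippets of InnerTube data):
+#
+# ```
+# extract_text(JSON.parse(%({"simpleText": "1 day ago"})))                         # => "1 day ago"
+# extract_text(JSON.parse(%({"runs": [{"text": "1.1K"}, {"text": " watching"}]}))) # => "1.1K watching"
+# extract_text(nil)                                                                # => nil
+# ```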
+def extract_text(item : JSON::Any?) : String?
+ if item.nil?
+ return nil
+ end
+
+ if text_container = item["simpleText"]?
+ return text_container.as_s
+ elsif text_container = item["runs"]?
+ return text_container.as_a.map(&.["text"].as_s).join("")
+ else
+ nil
+ end
+end
+
+# Check if an "ownerBadges" or a "badges" element contains a verified badge.
+# There are currently two known types of verified badges:
+#
+# "ownerBadges": [{
+# "metadataBadgeRenderer": {
+# "icon": { "iconType": "CHECK_CIRCLE_THICK" },
+# "style": "BADGE_STYLE_TYPE_VERIFIED",
+# "tooltip": "Verified",
+# "accessibilityData": { "label": "Verified" }
+# }
+# }],
+#
+# "ownerBadges": [{
+# "metadataBadgeRenderer": {
+# "icon": { "iconType": "OFFICIAL_ARTIST_BADGE" },
+# "style": "BADGE_STYLE_TYPE_VERIFIED_ARTIST",
+# "tooltip": "Official Artist Channel",
+# "accessibilityData": { "label": "Official Artist Channel" }
+# }
+# }],
+#
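+# Illustrative check (hypothetical, trimmed-down "ownerBadges" data):
+#
+# ```
+# badges = JSON.parse(%([{"metadataBadgeRenderer": {"style": "BADGE_STYLE_TYPE_VERIFIED"}}]))
+# has_verified_badge?(badges) # => true
+# has_verified_badge?(nil)    # => false
+# ```
+#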
+def has_verified_badge?(badges : JSON::Any?)
+ return false if badges.nil?
+
+ badges.as_a.each do |badge|
+ style = badge.dig("metadataBadgeRenderer", "style").as_s
+
+ return true if style == "BADGE_STYLE_TYPE_VERIFIED"
+ return true if style == "BADGE_STYLE_TYPE_VERIFIED_ARTIST"
+ end
+
+ return false
+rescue ex
+ LOGGER.debug("Unable to parse owner badges. Got exception: #{ex.message}")
+ LOGGER.trace("Owner badges data: #{badges.to_json}")
+
+ return false
+end
+
+# This function extracts SearchVideo items from a Category.
+# Categories are commonly returned in search results and trending pages.
+def extract_category(category : Category) : Array(SearchVideo)
+ return category.contents.select(SearchVideo)
+end
+
+# :ditto:
+def extract_category(category : Category, &)
+ category.contents.select(SearchVideo).each do |item|
+ yield item
+ end
+end
+
+def extract_selected_tab(tabs)
+ # Extract the selected tab from the array of tabs Youtube returns
+ return tabs.as_a.select(&.["tabRenderer"]?.try &.["selected"]?.try &.as_bool)[0]["tabRenderer"]
+end
diff --git a/src/invidious/yt_backend/url_sanitizer.cr b/src/invidious/yt_backend/url_sanitizer.cr
new file mode 100644
index 00000000..d539dadb
--- /dev/null
+++ b/src/invidious/yt_backend/url_sanitizer.cr
@@ -0,0 +1,121 @@
+require "uri"
+
+module UrlSanitizer
+ extend self
+
+ ALLOWED_QUERY_PARAMS = {
+ channel: ["u", "user", "lb"],
+ playlist: ["list"],
+ search: ["q", "search_query", "sp"],
+ watch: [
+ "v", # Video ID
+ "list", "index", # Playlist-related
+ "playlist", # Unnamed playlist (id,id,id,...) (embed-only?)
+ "t", "time_continue", "start", "end", # Timestamp
+ "lc", # Highlighted comment (watch page only)
+ ],
+ }
+
+ # Returns whether the given string is an ASCII word. This is the same as
+ # running the following regex in US-ASCII locale: /^[\w-]+$/
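+ #
+ # Illustrative examples:
+ #
+ # ```
+ # ascii_word?("dQw4w9WgXcQ") # => true
+ # ascii_word?("..%2F..")     # => false ('%' and '.' are rejected)
+ # ascii_word?("café")        # => false (multi-byte characters are rejected)
+ # ```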
+ private def ascii_word?(str : String) : Bool
+ return false if str.bytesize != str.size
+
+ str.each_byte do |byte|
+ next if 'a'.ord <= byte <= 'z'.ord
+ next if 'A'.ord <= byte <= 'Z'.ord
+ next if '0'.ord <= byte <= '9'.ord
+ next if byte == '-'.ord || byte == '_'.ord
+
+ return false
+ end
+
+ return true
+ end
+
+ # Return which kind of parameters are allowed based on the
+ # first path component (breadcrumb 0).
+ private def determine_allowed(path_root : String)
+ case path_root
+ when "watch", "w", "v", "embed", "e", "shorts", "clip"
+ return :watch
+ when .starts_with?("@"), "c", "channel", "user", "profile", "attribution_link"
+ return :channel
+ when "playlist", "mix"
+ return :playlist
+ when "results", "search"
+ return :search
+ else # hashtag, post, trending, brand URLs, etc..
+ return nil
+ end
+ end
+
+ # Create a new URI::Params object containing only the allowed parameters
+ private def copy_params(unsafe_params : URI::Params, allowed_type) : URI::Params
+ new_params = URI::Params.new
+
+ ALLOWED_QUERY_PARAMS[allowed_type].each do |name|
+ if unsafe_params[name]?
+ # Only copy the last parameter, in case there is more than one
+ new_params[name] = unsafe_params.fetch_all(name)[-1]
+ end
+ end
+
+ return new_params
+ end
+
+ # Transform any user-supplied YouTube URL into something we can trust
+ # and use across the code.
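+ #
+ # A few illustrative transformations (the inputs are hypothetical user-supplied
+ # URLs; exact query-parameter ordering in the output may vary):
+ #
+ # ```
+ # UrlSanitizer.process("youtu.be/dQw4w9WgXcQ?t=42&evil=1").to_s # => "/watch?t=42&v=dQw4w9WgXcQ" (unknown params dropped)
+ # UrlSanitizer.process("https://example.com/whatever").to_s     # => "/" (non-YouTube hosts redirect to the homepage)
+ # ```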
+ def process(str : String) : URI
+ # Because URI follows the RFC 3986 specification, a URL without a scheme
+ # will be parsed as a relative path, so we have to add a scheme ourselves.
+ str = "https://#{str}" if !str.starts_with?(/https?:\/\//)
+
+ unsafe_uri = URI.parse(str)
+ unsafe_host = unsafe_uri.host
+ unsafe_path = unsafe_uri.path
+
+ new_uri = URI.new(path: "/")
+
+ # Redirect to homepage for bogus URLs
+ return new_uri if (unsafe_host.nil? || unsafe_path.nil?)
+
+ breadcrumbs = unsafe_path
+ .split('/', remove_empty: true)
+ .compact_map do |bc|
+ # Exclude attempts at path traversal
+ next if bc == "." || bc == ".."
+
+ # Non-alnum characters are unlikely in a genuine URL
+ next if !ascii_word?(bc)
+
+ bc
+ end
+
+ # If nothing remains, it's either a legit URL to the homepage
+ # (who does that!?) or because we filtered some junk earlier.
+ return new_uri if breadcrumbs.empty?
+
+ # Replace the original query parameters with the sanitized ones
+ case unsafe_host
+ when .ends_with?("youtube.com")
+ # Use our sanitized path (not forgetting the leading '/')
+ new_uri.path = "/#{breadcrumbs.join('/')}"
+
+ # Then determine which params are allowed, and copy them over
+ if allowed = determine_allowed(breadcrumbs[0])
+ new_uri.query_params = copy_params(unsafe_uri.query_params, allowed)
+ end
+ when "youtu.be"
+ # Always redirect to the watch page
+ new_uri.path = "/watch"
+
+ new_params = copy_params(unsafe_uri.query_params, :watch)
+ new_params["v"] = breadcrumbs[0]
+
+ new_uri.query_params = new_params
+ end
+
+ return new_uri
+ end
+end
diff --git a/src/invidious/yt_backend/youtube_api.cr b/src/invidious/yt_backend/youtube_api.cr
new file mode 100644
index 00000000..8f5aa61d
--- /dev/null
+++ b/src/invidious/yt_backend/youtube_api.cr
@@ -0,0 +1,699 @@
+#
+# This file contains youtube API wrappers
+#
+
+module YoutubeAPI
+ extend self
+
+ # For Android versions, see https://en.wikipedia.org/wiki/Android_version_history
+ private ANDROID_APP_VERSION = "19.32.34"
+ private ANDROID_VERSION = "12"
+ private ANDROID_USER_AGENT = "com.google.android.youtube/#{ANDROID_APP_VERSION} (Linux; U; Android #{ANDROID_VERSION}; US) gzip"
+ private ANDROID_SDK_VERSION = 31_i64
+
+ private ANDROID_TS_APP_VERSION = "1.9"
+ private ANDROID_TS_USER_AGENT = "com.google.android.youtube/1.9 (Linux; U; Android 12; US) gzip"
+
+ # For Apple device names, see https://gist.github.com/adamawolf/3048717
+ # For iOS versions, see https://en.wikipedia.org/wiki/IOS_version_history#Releases,
+ # then go to the dedicated article of the major version you want.
+ private IOS_APP_VERSION = "19.32.8"
+ private IOS_USER_AGENT = "com.google.ios.youtube/#{IOS_APP_VERSION} (iPhone14,5; U; CPU iOS 17_6 like Mac OS X;)"
+ private IOS_VERSION = "17.6.1.21G93" # Major.Minor.Patch.Build
+
+ private WINDOWS_VERSION = "10.0"
+
+ # Enumerate used to select one of the clients supported by the API
+ enum ClientType
+ Web
+ WebEmbeddedPlayer
+ WebMobile
+ WebScreenEmbed
+ WebCreator
+
+ Android
+ AndroidEmbeddedPlayer
+ AndroidScreenEmbed
+ AndroidTestSuite
+
+ IOS
+ IOSEmbedded
+ IOSMusic
+
+ TvHtml5
+ TvHtml5ScreenEmbed
+ end
+
+ # List of hard-coded values used by the different clients
+ HARDCODED_CLIENTS = {
+ ClientType::Web => {
+ name: "WEB",
+ name_proto: "1",
+ version: "2.20240814.00.00",
+ screen: "WATCH_FULL_SCREEN",
+ os_name: "Windows",
+ os_version: WINDOWS_VERSION,
+ platform: "DESKTOP",
+ },
+ ClientType::WebEmbeddedPlayer => {
+ name: "WEB_EMBEDDED_PLAYER",
+ name_proto: "56",
+ version: "1.20240812.01.00",
+ screen: "EMBED",
+ os_name: "Windows",
+ os_version: WINDOWS_VERSION,
+ platform: "DESKTOP",
+ },
+ ClientType::WebMobile => {
+ name: "MWEB",
+ name_proto: "2",
+ version: "2.20240813.02.00",
+ os_name: "Android",
+ os_version: ANDROID_VERSION,
+ platform: "MOBILE",
+ },
+ ClientType::WebScreenEmbed => {
+ name: "WEB",
+ name_proto: "1",
+ version: "2.20240814.00.00",
+ screen: "EMBED",
+ os_name: "Windows",
+ os_version: WINDOWS_VERSION,
+ platform: "DESKTOP",
+ },
+ ClientType::WebCreator => {
+ name: "WEB_CREATOR",
+ name_proto: "62",
+ version: "1.20240918.03.00",
+ os_name: "Windows",
+ os_version: WINDOWS_VERSION,
+ platform: "DESKTOP",
+ },
+
+ # Android
+
+ ClientType::Android => {
+ name: "ANDROID",
+ name_proto: "3",
+ version: ANDROID_APP_VERSION,
+ android_sdk_version: ANDROID_SDK_VERSION,
+ user_agent: ANDROID_USER_AGENT,
+ os_name: "Android",
+ os_version: ANDROID_VERSION,
+ platform: "MOBILE",
+ },
+ ClientType::AndroidEmbeddedPlayer => {
+ name: "ANDROID_EMBEDDED_PLAYER",
+ name_proto: "55",
+ version: ANDROID_APP_VERSION,
+ },
+ ClientType::AndroidScreenEmbed => {
+ name: "ANDROID",
+ name_proto: "3",
+ version: ANDROID_APP_VERSION,
+ screen: "EMBED",
+ android_sdk_version: ANDROID_SDK_VERSION,
+ user_agent: ANDROID_USER_AGENT,
+ os_name: "Android",
+ os_version: ANDROID_VERSION,
+ platform: "MOBILE",
+ },
+ ClientType::AndroidTestSuite => {
+ name: "ANDROID_TESTSUITE",
+ name_proto: "30",
+ version: ANDROID_TS_APP_VERSION,
+ android_sdk_version: ANDROID_SDK_VERSION,
+ user_agent: ANDROID_TS_USER_AGENT,
+ os_name: "Android",
+ os_version: ANDROID_VERSION,
+ platform: "MOBILE",
+ },
+
+ # IOS
+
+ ClientType::IOS => {
+ name: "IOS",
+ name_proto: "5",
+ version: IOS_APP_VERSION,
+ user_agent: IOS_USER_AGENT,
+ device_make: "Apple",
+ device_model: "iPhone14,5",
+ os_name: "iPhone",
+ os_version: IOS_VERSION,
+ platform: "MOBILE",
+ },
+ ClientType::IOSEmbedded => {
+ name: "IOS_MESSAGES_EXTENSION",
+ name_proto: "66",
+ version: IOS_APP_VERSION,
+ user_agent: IOS_USER_AGENT,
+ device_make: "Apple",
+ device_model: "iPhone14,5",
+ os_name: "iPhone",
+ os_version: IOS_VERSION,
+ platform: "MOBILE",
+ },
+ ClientType::IOSMusic => {
+ name: "IOS_MUSIC",
+ name_proto: "26",
+ version: "7.14",
+ user_agent: "com.google.ios.youtubemusic/7.14 (iPhone14,5; U; CPU iOS 17_6 like Mac OS X;)",
+ device_make: "Apple",
+ device_model: "iPhone14,5",
+ os_name: "iPhone",
+ os_version: IOS_VERSION,
+ platform: "MOBILE",
+ },
+
+ # TV app
+
+ ClientType::TvHtml5 => {
+ name: "TVHTML5",
+ name_proto: "7",
+ version: "7.20240813.07.00",
+ },
+ ClientType::TvHtml5ScreenEmbed => {
+ name: "TVHTML5_SIMPLY_EMBEDDED_PLAYER",
+ name_proto: "85",
+ version: "2.0",
+ screen: "EMBED",
+ },
+ }
+
+ ####################################################################
+ # struct ClientConfig
+ #
+ # Data structure used to pass a client configuration to the different
+ # API endpoints handlers.
+ #
+ # Use case examples:
+ #
+ # ```
+ # # Get Norwegian search results
+ # conf_1 = ClientConfig.new(region: "NO")
+ # YoutubeAPI::search("Kollektivet", params: "", client_config: conf_1)
+ #
+ # # Use the Android client to request video streams URLs
+ # conf_2 = ClientConfig.new(client_type: ClientType::Android)
+ # YoutubeAPI::player(video_id: "dQw4w9WgXcQ", client_config: conf_2)
+ # ```
+ #
+ struct ClientConfig
+ # Type of client to emulate.
+ # See `enum ClientType` and `HARDCODED_CLIENTS`.
+ property client_type : ClientType
+
+ # Region to provide to youtube, e.g to alter search results
+ # (this is passed as the `gl` parameter).
+ property region : String | Nil
+
+ # Initialization function
+ def initialize(
+ *,
+ @client_type = ClientType::Web,
+ @region = "US"
+ )
+ end
+
+ # Getter functions that provide easy access to the hardcoded client
+ # parameters (name/version strings, user agent, OS details, etc.)
+ def name : String
+ HARDCODED_CLIENTS[@client_type][:name]
+ end
+
+ def name_proto : String
+ HARDCODED_CLIENTS[@client_type][:name_proto]
+ end
+
+ # :ditto:
+ def version : String
+ HARDCODED_CLIENTS[@client_type][:version]
+ end
+
+ # :ditto:
+ def screen : String
+ HARDCODED_CLIENTS[@client_type][:screen]? || ""
+ end
+
+ def android_sdk_version : Int64?
+ HARDCODED_CLIENTS[@client_type][:android_sdk_version]?
+ end
+
+ def user_agent : String?
+ HARDCODED_CLIENTS[@client_type][:user_agent]?
+ end
+
+ def os_name : String?
+ HARDCODED_CLIENTS[@client_type][:os_name]?
+ end
+
+ def device_make : String?
+ HARDCODED_CLIENTS[@client_type][:device_make]?
+ end
+
+ def device_model : String?
+ HARDCODED_CLIENTS[@client_type][:device_model]?
+ end
+
+ def os_version : String?
+ HARDCODED_CLIENTS[@client_type][:os_version]?
+ end
+
+ def platform : String?
+ HARDCODED_CLIENTS[@client_type][:platform]?
+ end
+
+ # Convert to string, for logging purposes
+ def to_s
+ return {
+ client_type: self.name,
+ region: @region,
+ }.to_s
+ end
+ end
+
+ # Default client config, used if nothing is passed
+ DEFAULT_CLIENT_CONFIG = ClientConfig.new
+
+ ####################################################################
+ # make_context(client_config)
+ #
+ # Return, as a Hash, the "context" data required to request the
+ # youtube API endpoints.
+ #
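+ # For the default Web client, the returned Hash looks roughly like this
+ # (version and OS values come from HARDCODED_CLIENTS above; "visitorData"
+ # is added when configured):
+ #
+ # ```
+ # {
+ #   "client" => {
+ #     "hl"            => "en",
+ #     "gl"            => "US",
+ #     "clientName"    => "WEB",
+ #     "clientVersion" => "2.20240814.00.00",
+ #     "clientScreen"  => "WATCH_FULL_SCREEN",
+ #     "osName"        => "Windows",
+ #     "osVersion"     => "10.0",
+ #     "platform"      => "DESKTOP",
+ #   },
+ # }
+ # ```
+ #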
+ private def make_context(client_config : ClientConfig | Nil, video_id = "dQw4w9WgXcQ") : Hash
+ # Use the default client config if nil is passed
+ client_config ||= DEFAULT_CLIENT_CONFIG
+
+ client_context = {
+ "client" => {
+ "hl" => "en",
+ "gl" => client_config.region || "US", # Can't be empty!
+ "clientName" => client_config.name,
+ "clientVersion" => client_config.version,
+ } of String => String | Int64,
+ }
+
+ # Add some more context if it exists in the client definitions
+ if !client_config.screen.empty?
+ client_context["client"]["clientScreen"] = client_config.screen
+ end
+
+ if client_config.screen == "EMBED"
+ client_context["thirdParty"] = {
+ "embedUrl" => "https://www.youtube.com/embed/#{video_id}",
+ } of String => String | Int64
+ end
+
+ if android_sdk_version = client_config.android_sdk_version
+ client_context["client"]["androidSdkVersion"] = android_sdk_version
+ end
+
+ if device_make = client_config.device_make
+ client_context["client"]["deviceMake"] = device_make
+ end
+
+ if device_model = client_config.device_model
+ client_context["client"]["deviceModel"] = device_model
+ end
+
+ if os_name = client_config.os_name
+ client_context["client"]["osName"] = os_name
+ end
+
+ if os_version = client_config.os_version
+ client_context["client"]["osVersion"] = os_version
+ end
+
+ if platform = client_config.platform
+ client_context["client"]["platform"] = platform
+ end
+
+ if CONFIG.visitor_data.is_a?(String)
+ client_context["client"]["visitorData"] = CONFIG.visitor_data.as(String)
+ end
+
+ return client_context
+ end
+
+ ####################################################################
+ # browse(continuation, client_config?)
+ # browse(browse_id, params, client_config?)
+ #
+ # Requests the youtubei/v1/browse endpoint with the required headers
+ # and POST data in order to get a JSON reply in english that can
+ # be easily parsed.
+ #
+ # Both forms can take an optional ClientConfig parameter (see
+ # `struct ClientConfig` above for more details).
+ #
+ # The requested data can either be:
+ #
+ # - A continuation token (ctoken). Depending on this token's
+ # contents, the returned data can be playlist videos, channel
+ # community tab content, channel info, ...
+ #
+ # - A playlist ID (parameters MUST be an empty string)
+ #
+ def browse(continuation : String, client_config : ClientConfig | Nil = nil)
+ # JSON Request data, required by the API
+ data = {
+ "context" => self.make_context(client_config),
+ "continuation" => continuation,
+ }
+
+ return self._post_json("/youtubei/v1/browse", data, client_config)
+ end
+
+ # :ditto:
+ def browse(
+ browse_id : String,
+ *, # Force the following parameters to be passed by name
+ params : String,
+ client_config : ClientConfig | Nil = nil
+ )
+ # JSON Request data, required by the API
+ data = {
+ "browseId" => browse_id,
+ "context" => self.make_context(client_config),
+ }
+
+ # Append the additional parameters if those were provided
+ # (e.g. this is required for channel info, playlists and the community tab)
+ if params != ""
+ data["params"] = params
+ end
+
+ return self._post_json("/youtubei/v1/browse", data, client_config)
+ end
+
+ ####################################################################
+ # next(continuation, client_config?)
+ # next(data, client_config?)
+ #
+ # Requests the youtubei/v1/next endpoint with the required headers
+ # and POST data in order to get a JSON reply in English that can
+ # be easily parsed.
+ #
+ # Both forms can take an optional ClientConfig parameter (see
+ # `struct ClientConfig` above for more details).
+ #
+ # The requested data can be:
+ #
+ # - A continuation token (ctoken). Depending on this token's
+ # contents, the returned data can be videos comments,
+ # their replies, ... In this case, the string must be passed
+ # directly to the function. E.g:
+ #
+ # ```
+ # YoutubeAPI::next("ABCDEFGH_abcdefgh==")
+ # ```
+ #
+ # - Arbitrary parameters, in Hash form. See examples below for
+ # known examples of arbitrary data that can be passed to YouTube:
+ #
+ # ```
+ # # Get the videos related to a specific video ID
+ # YoutubeAPI::next({"videoId" => "dQw4w9WgXcQ"})
+ #
+ # # Get a playlist video's details
+ # YoutubeAPI::next({
+ # "videoId" => "9bZkp7q19f0",
+ # "playlistId" => "PL_oFlvgqkrjUVQwiiE3F3k3voF4tjXeP0",
+ # })
+ # ```
+ #
+ def next(continuation : String, *, client_config : ClientConfig | Nil = nil)
+ # JSON Request data, required by the API
+ data = {
+ "context" => self.make_context(client_config),
+ "continuation" => continuation,
+ }
+
+ return self._post_json("/youtubei/v1/next", data, client_config)
+ end
+
+ # :ditto:
+ def next(data : Hash, *, client_config : ClientConfig | Nil = nil)
+ # JSON Request data, required by the API
+ data2 = data.merge({
+ "context" => self.make_context(client_config),
+ })
+
+ return self._post_json("/youtubei/v1/next", data2, client_config)
+ end
+
+ # Allow a NamedTuple to be passed, too.
+ def next(data : NamedTuple, *, client_config : ClientConfig | Nil = nil)
+ return self.next(data.to_h, client_config: client_config)
+ end
+
+ ####################################################################
+ # player(video_id, params, client_config?)
+ #
+ # Requests the youtubei/v1/player endpoint with the required headers
+ # and POST data in order to get a JSON reply.
+ #
+ # The requested data is a video ID (`v=` parameter), with some
+ # additional parameters, formatted as a base64 string.
+ #
+ # An optional ClientConfig parameter can be passed, too (see
+ # `struct ClientConfig` above for more details).
+ #
+ def player(
+ video_id : String,
+ *, # Force the following parameters to be passed by name
+ params : String,
+ client_config : ClientConfig | Nil = nil
+ )
+ # Use the default client config if nil is passed
+ client_config ||= DEFAULT_CLIENT_CONFIG
+
+ # Playback context, separate because it can be different between clients
+ playback_ctx = {
+ "html5Preference" => "HTML5_PREF_WANTS",
+ "referer" => "https://www.youtube.com/watch?v=#{video_id}",
+ } of String => String | Int64
+
+ if {"WEB", "TVHTML5"}.any? { |s| client_config.name.starts_with? s }
+ if sts = DECRYPT_FUNCTION.try &.get_sts
+ playback_ctx["signatureTimestamp"] = sts.to_i64
+ end
+ end
+
+ # JSON Request data, required by the API
+ data = {
+ "contentCheckOk" => true,
+ "videoId" => video_id,
+ "context" => self.make_context(client_config, video_id),
+ "racyCheckOk" => true,
+ "user" => {
+ "lockedSafetyMode" => false,
+ },
+ "playbackContext" => {
+ "contentPlaybackContext" => playback_ctx,
+ },
+ "serviceIntegrityDimensions" => {
+ "poToken" => CONFIG.po_token,
+ },
+ }
+
+ # Append the additional parameters if those were provided
+ if params != ""
+ data["params"] = params
+ end
+
+ return self._post_json("/youtubei/v1/player", data, client_config)
+ end
+
+ ####################################################################
+ # resolve_url(url, client_config?)
+ #
+ # Requests the youtubei/v1/navigation/resolve_url endpoint with the
+ # required headers and POST data in order to get a JSON reply.
+ #
+ # An optional ClientConfig parameter can be passed, too (see
+ # `struct ClientConfig` above for more details).
+ #
+ # Output:
+ #
+ # ```
+ # # Valid channel "brand URL" gives the related UCID and browse ID
+ # channel_a = YoutubeAPI.resolve_url("https://youtube.com/c/google")
+ # channel_a # => {
+ # "endpoint": {
+ # "browseEndpoint": {
+ # "params": "EgC4AQA%3D",
+ # "browseId":"UCK8sQmJBp8GCxrOtXWBpyEA"
+ # },
+ # ...
+ # }
+ # }
+ #
+ # # An invalid URL throws an InfoException
+ # channel_b = YoutubeAPI.resolve_url("https://youtube.com/c/invalid")
+ # ```
+ #
+ def resolve_url(url : String, client_config : ClientConfig | Nil = nil)
+ data = {
+ "context" => self.make_context(nil),
+ "url" => url,
+ }
+
+ return self._post_json("/youtubei/v1/navigation/resolve_url", data, client_config)
+ end
+
+ ####################################################################
+ # search(search_query, params, client_config?)
+ #
+ # Requests the youtubei/v1/search endpoint with the required headers
+ # and POST data in order to get a JSON reply. As the search results
+ # vary depending on the region, a region code can be specified in
+ # order to get non-US results.
+ #
+ # The requested data is a search string, with some additional
+ # parameters, formatted as a base64 string.
+ #
+ # An optional ClientConfig parameter can be passed, too (see
+ # `struct ClientConfig` above for more details).
+ #
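+ # Example (mirroring the ClientConfig usage shown above; the query and
+ # region are illustrative):
+ #
+ # ```
+ # conf = ClientConfig.new(region: "NO")
+ # YoutubeAPI.search("Kollektivet", params: "", client_config: conf)
+ # ```
+ #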
+ def search(
+ search_query : String,
+ params : String,
+ client_config : ClientConfig | Nil = nil
+ )
+ # JSON Request data, required by the API
+ data = {
+ "query" => search_query,
+ "context" => self.make_context(client_config),
+ "params" => params,
+ }
+
+ return self._post_json("/youtubei/v1/search", data, client_config)
+ end
+
+ ####################################################################
+ # get_transcript(params, client_config?)
+ #
+ # Requests the youtubei/v1/get_transcript endpoint with the required headers
+ # and POST data in order to get a JSON reply.
+ #
+ # The requested data is a specially encoded protobuf string that denotes the specific language requested.
+ #
+ # An optional ClientConfig parameter can be passed, too (see
+ # `struct ClientConfig` above for more details).
+ #
+ def get_transcript(
+ params : String,
+ client_config : ClientConfig | Nil = nil
+ ) : Hash(String, JSON::Any)
+ data = {
+ "context" => self.make_context(client_config),
+ "params" => params,
+ }
+
+ return self._post_json("/youtubei/v1/get_transcript", data, client_config)
+ end
+
+ ####################################################################
+ # _post_json(endpoint, data, client_config?)
+ #
+ # Internal function that does the actual request to youtube servers
+ # and handles errors.
+ #
+ # The requested data is an endpoint (URL without the domain part)
+ # and the data as a Hash object.
+ #
+ def _post_json(
+ endpoint : String,
+ data : Hash,
+ client_config : ClientConfig | Nil
+ ) : Hash(String, JSON::Any)
+ # Use the default client config if nil is passed
+ client_config ||= DEFAULT_CLIENT_CONFIG
+
+ # Query parameters
+ url = "#{endpoint}?prettyPrint=false"
+
+ headers = HTTP::Headers{
+ "Content-Type" => "application/json; charset=UTF-8",
+ "Accept-Encoding" => "gzip, deflate",
+ "x-goog-api-format-version" => "2",
+ "x-youtube-client-name" => client_config.name_proto,
+ "x-youtube-client-version" => client_config.version,
+ }
+
+ if user_agent = client_config.user_agent
+ headers["User-Agent"] = user_agent
+ end
+
+ if CONFIG.visitor_data.is_a?(String)
+ headers["X-Goog-Visitor-Id"] = CONFIG.visitor_data.as(String)
+ end
+
+ # Logging
+ LOGGER.debug("YoutubeAPI: Using endpoint: \"#{endpoint}\"")
+ LOGGER.trace("YoutubeAPI: ClientConfig: #{client_config}")
+ LOGGER.trace("YoutubeAPI: POST data: #{data}")
+
+ # Send the POST request
+ body = YT_POOL.client() do |client|
+ client.post(url, headers: headers, body: data.to_json) do |response|
+ if response.status_code != 200
+ raise InfoException.new("Error: non 200 status code. Youtube API returned \
+ status code #{response.status_code}. See <a href=\"https://docs.invidious.io/youtube-errors-explained/\"> \
+ https://docs.invidious.io/youtube-errors-explained/</a> for troubleshooting.")
+ end
+ self._decompress(response.body_io, response.headers["Content-Encoding"]?)
+ end
+ end
+
+ # Convert result to Hash
+ initial_data = JSON.parse(body).as_h
+
+ # Error handling
+ if initial_data.has_key?("error")
+ code = initial_data["error"]["code"]
+ message = initial_data["error"]["message"].to_s.sub(/(\\n)+\^$/, "")
+
+ # Logging
+ LOGGER.error("YoutubeAPI: Got error #{code} when requesting #{endpoint}")
+ LOGGER.error("YoutubeAPI: #{message}")
+ LOGGER.info("YoutubeAPI: POST data was: #{data}")
+
+ raise InfoException.new("Could not extract JSON. Youtube API returned \
+ error #{code} with message:<br>\"#{message}\"")
+ end
+
+ return initial_data
+ end
+
+ ####################################################################
+ # _decompress(body_io, headers)
+ #
+ # Internal function that reads the Content-Encoding headers and
+ # decompresses the content accordingly.
+ #
+ # We decompress the body ourselves (when using HTTP::Client) because
+ # the auto-decompress feature is broken in the Crystal stdlib.
+ #
+ # Read more:
+ # - https://github.com/iv-org/invidious/issues/2612
+ # - https://github.com/crystal-lang/crystal/issues/11354
+ #
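+ # Illustrative round-trip (gzip only, using Crystal's stdlib):
+ #
+ # ```
+ # compressed = IO::Memory.new
+ # Compress::Gzip::Writer.open(compressed) { |gz| gz.print "{}" }
+ # compressed.rewind
+ #
+ # YoutubeAPI._decompress(compressed, "gzip") # => "{}"
+ # ```
+ #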
+ def _decompress(body_io : IO, encodings : String?) : String
+ if encodings
+ # Multiple encodings can be combined, and are listed in the order
+ # in which they were applied. E.g: "deflate, gzip" means that the
+ # content must be first "gunzipped", then "deflated".
+ encodings.split(',').reverse.each do |enc|
+ case enc.strip(' ')
+ when "gzip"
+ body_io = Compress::Gzip::Reader.new(body_io, sync_close: true)
+ when "deflate"
+ body_io = Compress::Deflate::Reader.new(body_io, sync_close: true)
+ end
+ end
+ end
+
+ return body_io.gets_to_end
+ end
+end # End of module