summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--CHANGELOG.md2
-rw-r--r--assets/js/player.js4
-rw-r--r--assets/js/watch.js4
-rw-r--r--docker/Dockerfile7
-rw-r--r--kubernetes/Chart.yaml4
-rw-r--r--kubernetes/values.yaml2
-rw-r--r--shard.yml1
-rw-r--r--src/invidious.cr98
-rw-r--r--src/invidious/channels.cr27
-rw-r--r--src/invidious/comments.cr4
-rw-r--r--src/invidious/helpers/errors.cr2
-rw-r--r--src/invidious/helpers/helpers.cr34
-rw-r--r--src/invidious/helpers/logger.cr16
-rw-r--r--src/invidious/helpers/proxy.cr6
-rw-r--r--src/invidious/helpers/signatures.cr102
-rw-r--r--src/invidious/helpers/utils.cr11
-rw-r--r--src/invidious/jobs/bypass_captcha_job.cr7
-rw-r--r--src/invidious/jobs/refresh_channels_job.cr38
-rw-r--r--src/invidious/jobs/refresh_feeds_job.cr23
-rw-r--r--src/invidious/jobs/subscribe_to_feeds_job.cr19
-rw-r--r--src/invidious/jobs/update_decrypt_function_job.cr9
-rw-r--r--src/invidious/playlists.cr24
-rw-r--r--src/invidious/routes/base_route.cr3
-rw-r--r--src/invidious/routes/watch.cr2
-rw-r--r--src/invidious/routing.cr4
-rw-r--r--src/invidious/users.cr2
-rw-r--r--src/invidious/videos.cr4
-rw-r--r--src/invidious/views/components/item.ecr7
28 files changed, 272 insertions, 194 deletions
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 314a134f..8aa416ec 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,5 @@
+# Note: This is no longer updated and links to omarroth's repo, which doesn't exist anymore.
+
# 0.20.0 (2019-11-06)
# Version 0.20.0: Custom Playlists
diff --git a/assets/js/player.js b/assets/js/player.js
index 6d143ed0..5d045391 100644
--- a/assets/js/player.js
+++ b/assets/js/player.js
@@ -69,7 +69,7 @@ if (location.pathname.startsWith('/embed/')) {
player.on('error', function (event) {
if (player.error().code === 2 || player.error().code === 4) {
- setInterval(setTimeout(function (event) {
+ setTimeout(function (event) {
console.log('An error occured in the player, reloading...');
var currentTime = player.currentTime();
@@ -88,7 +88,7 @@ player.on('error', function (event) {
if (!paused) {
player.play();
}
- }, 5000), 5000);
+ }, 5000);
}
});
diff --git a/assets/js/watch.js b/assets/js/watch.js
index 05530f3d..eb493bf3 100644
--- a/assets/js/watch.js
+++ b/assets/js/watch.js
@@ -272,7 +272,7 @@ function get_reddit_comments(retries) {
xhr.onerror = function () {
console.log('Pulling comments failed... ' + retries + '/5');
- setInterval(function () { get_reddit_comments(retries - 1) }, 1000);
+ setTimeout(function () { get_reddit_comments(retries - 1) }, 1000);
}
xhr.ontimeout = function () {
@@ -346,7 +346,7 @@ function get_youtube_comments(retries) {
comments.innerHTML =
'<h3 style="text-align:center"><div class="loading"><i class="icon ion-ios-refresh"></i></div></h3>';
console.log('Pulling comments failed... ' + retries + '/5');
- setInterval(function () { get_youtube_comments(retries - 1) }, 1000);
+ setTimeout(function () { get_youtube_comments(retries - 1) }, 1000);
}
xhr.ontimeout = function () {
diff --git a/docker/Dockerfile b/docker/Dockerfile
index d93f2868..ce4cc765 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -4,12 +4,7 @@ WORKDIR /invidious
COPY ./shard.yml ./shard.yml
COPY ./shard.lock ./shard.lock
RUN shards install && \
- # TODO: Document build instructions
- # See https://github.com/omarroth/boringssl-alpine/blob/master/APKBUILD,
- # https://github.com/omarroth/lsquic-alpine/blob/master/APKBUILD,
- # https://github.com/omarroth/lsquic.cr/issues/1#issuecomment-631610081
- # for details building static lib
- curl -Lo ./lib/lsquic/src/lsquic/ext/liblsquic.a https://omar.yt/lsquic/liblsquic-v2.18.1.a
+ curl -Lo ./lib/lsquic/src/lsquic/ext/liblsquic.a https://github.com/iv-org/lsquic-static-alpine/releases/download/v2.18.1/liblsquic.a
COPY ./src/ ./src/
# TODO: .git folder is required for building – this is destructive.
# See definition of CURRENT_BRANCH, CURRENT_COMMIT and CURRENT_VERSION.
diff --git a/kubernetes/Chart.yaml b/kubernetes/Chart.yaml
index bb0838ad..9e4b793e 100644
--- a/kubernetes/Chart.yaml
+++ b/kubernetes/Chart.yaml
@@ -9,9 +9,9 @@ keywords:
- video
- privacy
home: https://invidio.us/
-icon: https://raw.githubusercontent.com/omarroth/invidious/05988c1c49851b7d0094fca16aeaf6382a7f64ab/assets/favicon-32x32.png
+icon: https://raw.githubusercontent.com/iv-org/invidious/05988c1c49851b7d0094fca16aeaf6382a7f64ab/assets/favicon-32x32.png
sources:
-- https://github.com/omarroth/invidious
+- https://github.com/iv-org/invidious
maintainers:
- name: Leon Klingele
email: mail@leonklingele.de
diff --git a/kubernetes/values.yaml b/kubernetes/values.yaml
index 4d037022..08def6e4 100644
--- a/kubernetes/values.yaml
+++ b/kubernetes/values.yaml
@@ -1,7 +1,7 @@
name: invidious
image:
- repository: omarroth/invidious
+ repository: iv-org/invidious
tag: latest
pullPolicy: Always
diff --git a/shard.yml b/shard.yml
index 2b59786e..e0fa1d25 100644
--- a/shard.yml
+++ b/shard.yml
@@ -3,6 +3,7 @@ version: 0.20.1
authors:
- Omar Roth <omarroth@protonmail.com>
+ - Invidious team
targets:
invidious:
diff --git a/src/invidious.cr b/src/invidious.cr
index 1b8be67e..deb24ac3 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -106,34 +106,30 @@ LOCALES = {
YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)
-config = CONFIG
-output = STDOUT
-loglvl = LogLevel::Debug
-
+# CLI
Kemal.config.extra_options do |parser|
parser.banner = "Usage: invidious [arguments]"
- parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{config.channel_threads})") do |number|
+ parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{CONFIG.channel_threads})") do |number|
begin
- config.channel_threads = number.to_i
+ CONFIG.channel_threads = number.to_i
rescue ex
puts "THREADS must be integer"
exit
end
end
- parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{config.feed_threads})") do |number|
+ parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{CONFIG.feed_threads})") do |number|
begin
- config.feed_threads = number.to_i
+ CONFIG.feed_threads = number.to_i
rescue ex
puts "THREADS must be integer"
exit
end
end
- parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output_arg|
- FileUtils.mkdir_p(File.dirname(output_arg))
- output = File.open(output_arg, mode: "a")
+ parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
+ CONFIG.output = output
end
- parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{loglvl})") do |loglvl_arg|
- loglvl = LogLevel.parse(loglvl_arg)
+ parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
+ CONFIG.log_level = LogLevel.parse(log_level)
end
parser.on("-v", "--version", "Print version") do
puts SOFTWARE.to_pretty_json
@@ -143,43 +139,56 @@ end
Kemal::CLI.new ARGV
-logger = Invidious::LogHandler.new(output, loglvl)
+if CONFIG.output.upcase != "STDOUT"
+ FileUtils.mkdir_p(File.dirname(CONFIG.output))
+end
+OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
+LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
+
+config = CONFIG
# Check table integrity
if CONFIG.check_tables
- check_enum(PG_DB, logger, "privacy", PlaylistPrivacy)
+ check_enum(PG_DB, "privacy", PlaylistPrivacy)
- check_table(PG_DB, logger, "channels", InvidiousChannel)
- check_table(PG_DB, logger, "channel_videos", ChannelVideo)
- check_table(PG_DB, logger, "playlists", InvidiousPlaylist)
- check_table(PG_DB, logger, "playlist_videos", PlaylistVideo)
- check_table(PG_DB, logger, "nonces", Nonce)
- check_table(PG_DB, logger, "session_ids", SessionId)
- check_table(PG_DB, logger, "users", User)
- check_table(PG_DB, logger, "videos", Video)
+ check_table(PG_DB, "channels", InvidiousChannel)
+ check_table(PG_DB, "channel_videos", ChannelVideo)
+ check_table(PG_DB, "playlists", InvidiousPlaylist)
+ check_table(PG_DB, "playlist_videos", PlaylistVideo)
+ check_table(PG_DB, "nonces", Nonce)
+ check_table(PG_DB, "session_ids", SessionId)
+ check_table(PG_DB, "users", User)
+ check_table(PG_DB, "videos", Video)
if CONFIG.cache_annotations
- check_table(PG_DB, logger, "annotations", Annotation)
+ check_table(PG_DB, "annotations", Annotation)
end
end
# Start jobs
-Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, logger, config, HMAC_KEY)
-Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
+Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, config)
+
+DECRYPT_FUNCTION = DecryptFunction.new(CONFIG.decrypt_polling)
+if config.decrypt_polling
+ Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
+end
if config.statistics_enabled
Invidious::Jobs.register Invidious::Jobs::StatisticsRefreshJob.new(PG_DB, config, SOFTWARE)
end
+if (config.use_pubsub_feeds.is_a?(Bool) && config.use_pubsub_feeds.as(Bool)) || (config.use_pubsub_feeds.is_a?(Int32) && config.use_pubsub_feeds.as(Int32) > 0)
+ Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, config, HMAC_KEY)
+end
+
if config.popular_enabled
Invidious::Jobs.register Invidious::Jobs::PullPopularVideosJob.new(PG_DB)
end
if config.captcha_key
- Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(logger, config)
+ Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(config)
end
connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
@@ -191,8 +200,6 @@ def popular_videos
Invidious::Jobs::PullPopularVideosJob::POPULAR_VIDEOS.get
end
-DECRYPT_FUNCTION = Invidious::Jobs::UpdateDecryptFunctionJob::DECRYPT_FUNCTION
-
before_all do |env|
preferences = begin
Preferences.from_json(env.request.cookies["PREFS"]?.try &.value || "{}")
@@ -1511,7 +1518,7 @@ post "/feed/webhook/:token" do |env|
signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
- logger.error("/feed/webhook/#{token} : Invalid signature")
+ LOGGER.error("/feed/webhook/#{token} : Invalid signature")
env.response.status_code = 200
next
end
@@ -2133,14 +2140,13 @@ get "/api/v1/annotations/:id" do |env|
file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")
- client = make_client(ARCHIVE_URL)
- location = client.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}")
+ location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))
if !location.headers["Location"]?
env.response.status_code = location.status_code
end
- response = make_client(URI.parse(location.headers["Location"])).get(location.headers["Location"])
+ response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))
if response.body.empty?
env.response.status_code = 404
@@ -2622,6 +2628,8 @@ end
begin
playlist = get_playlist(PG_DB, plid, locale)
+ rescue ex : InfoException
+ next error_json(404, ex)
rescue ex
next error_json(404, "Playlist does not exist.")
end
@@ -3182,7 +3190,8 @@ get "/api/manifest/dash/id/:id" do |env|
url = url.rchop("</BaseURL>")
if local
- url = URI.parse(url).full_path
+ uri = URI.parse(url)
+ url = "#{uri.full_path}host/#{uri.host}/"
end
"<BaseURL>#{url}</BaseURL>"
@@ -3364,7 +3373,7 @@ get "/latest_version" do |env|
env.redirect "/api/v1/captions/#{id}?label=#{label}&title=#{title}"
next
else
- itag = download_widget["itag"].as_s
+ itag = download_widget["itag"].as_s.to_i
local = "true"
end
end
@@ -3498,8 +3507,12 @@ get "/videoplayback" do |env|
location = URI.parse(response.headers["Location"])
env.response.headers["Access-Control-Allow-Origin"] = "*"
- host = "#{location.scheme}://#{location.host}"
- client = make_client(URI.parse(host), region)
+ new_host = "#{location.scheme}://#{location.host}"
+ if new_host != host
+ host = new_host
+ client.close
+ client = make_client(URI.parse(new_host), region)
+ end
url = "#{location.full_path}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
else
@@ -3530,7 +3543,6 @@ get "/videoplayback" do |env|
end
begin
- client = make_client(URI.parse(host), region)
client.get(url, headers) do |response|
response.headers.each do |key, value|
if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
@@ -3571,8 +3583,6 @@ get "/videoplayback" do |env|
chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
end
- client = make_client(URI.parse(host), region)
-
# TODO: Record bytes written so we can restart after a chunk fails
while true
if !range_end && content_length
@@ -3636,6 +3646,7 @@ get "/videoplayback" do |env|
if ex.message != "Error reading socket: Connection reset by peer"
break
else
+ client.close
client = make_client(URI.parse(host), region)
end
end
@@ -3645,6 +3656,7 @@ get "/videoplayback" do |env|
first_chunk = false
end
end
+ client.close
end
get "/ggpht/*" do |env|
@@ -3919,7 +3931,7 @@ add_context_storage_type(Array(String))
add_context_storage_type(Preferences)
add_context_storage_type(User)
-Kemal.config.logger = logger
+Kemal.config.logger = LOGGER
Kemal.config.host_binding = Kemal.config.host_binding != "0.0.0.0" ? Kemal.config.host_binding : CONFIG.host_binding
Kemal.config.port = Kemal.config.port != 3000 ? Kemal.config.port : CONFIG.port
Kemal.run
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 444a6eda..9986fe1b 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -203,7 +203,12 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
end
def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+ LOGGER.debug("fetch_channel: #{ucid}")
+ LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
+
+ LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
+ LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
rss = XML.parse_html(rss)
author = rss.xpath_node(%q(//feed/title))
@@ -219,14 +224,19 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
auto_generated = true
end
+ LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+
page = 1
+ LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
videos = [] of SearchVideo
begin
initial_data = JSON.parse(response.body).as_a.find &.["response"]?
raise InfoException.new("Could not extract channel JSON") if !initial_data
+
+ LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
videos = extract_videos(initial_data.as_h, author, ucid)
rescue ex
if response.body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
@@ -236,6 +246,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
raise ex
end
+ LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
rss.xpath_nodes("//feed/entry").each do |entry|
video_id = entry.xpath_node("videoid").not_nil!.content
title = entry.xpath_node("title").not_nil!.content
@@ -269,6 +280,8 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
views: views,
})
+ LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+
# We don't include the 'premiere_timestamp' here because channel pages don't include them,
# meaning the above timestamp is always null
was_insert = db.query_one("INSERT INTO channel_videos VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) \
@@ -276,8 +289,13 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
updated = $4, ucid = $5, author = $6, length_seconds = $7, \
live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
- db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
- feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid) if was_insert
+ if was_insert
+ LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+ db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
+ feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid)
+ else
+ LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+ end
end
if pull_all_videos
@@ -634,7 +652,8 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
else
video_id = attachment["videoId"].as_s
- json.field "title", attachment["title"]["simpleText"].as_s
+ video_title = attachment["title"]["simpleText"]? || attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?
+ json.field "title", video_title
json.field "videoId", video_id
json.field "videoThumbnails" do
generate_thumbnails(json, video_id)
@@ -656,7 +675,7 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
json.field "published", published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
- view_count = attachment["viewCountText"]["simpleText"].as_s.gsub(/\D/, "").to_i64? || 0_i64
+ view_count = attachment["viewCountText"]?.try &.["simpleText"].as_s.gsub(/\D/, "").to_i64? || 0_i64
json.field "viewCount", view_count
json.field "viewCountText", translate(locale, "`x` views", number_to_short_text(view_count))
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
index 9f9edca0..0ac99ba5 100644
--- a/src/invidious/comments.cr
+++ b/src/invidious/comments.cr
@@ -242,7 +242,7 @@ end
def fetch_reddit_comments(id, sort_by = "confidence")
client = make_client(REDDIT_URL)
- headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by /u/omarroth)"}
+ headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}
# TODO: Use something like #479 for a static list of instances to use here
query = "(url:3D#{id}%20OR%20url:#{id})%20(site:invidio.us%20OR%20site:youtube.com%20OR%20site:youtu.be)"
@@ -269,6 +269,8 @@ def fetch_reddit_comments(id, sort_by = "confidence")
raise InfoException.new("Could not fetch comments")
end
+ client.close
+
comments = result[1].data.as(RedditListing).children
return comments, thread
end
diff --git a/src/invidious/helpers/errors.cr b/src/invidious/helpers/errors.cr
index 4487ff8c..2c62d44b 100644
--- a/src/invidious/helpers/errors.cr
+++ b/src/invidious/helpers/errors.cr
@@ -26,6 +26,7 @@ def error_template_helper(env : HTTP::Server::Context, config : Config, locale :
if exception.is_a?(InfoException)
return error_template_helper(env, config, locale, status_code, exception.message || "")
end
+ env.response.content_type = "text/html"
env.response.status_code = status_code
issue_template = %(Title: `#{exception.message} (#{exception.class})`)
issue_template += %(\nDate: `#{Time::Format::ISO_8601_DATE_TIME.format(Time.utc)}`)
@@ -43,6 +44,7 @@ def error_template_helper(env : HTTP::Server::Context, config : Config, locale :
end
def error_template_helper(env : HTTP::Server::Context, config : Config, locale : Hash(String, JSON::Any) | Nil, status_code : Int32, message : String)
+ env.response.content_type = "text/html"
env.response.status_code = status_code
error_message = translate(locale, message)
return templated "error"
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index 2da49abb..1f56ec92 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -64,10 +64,13 @@ end
class Config
include YAML::Serializable
- property channel_threads : Int32 # Number of threads to use for crawling videos from channels (for updating subscriptions)
- property feed_threads : Int32 # Number of threads to use for updating feeds
+ property channel_threads : Int32 = 1 # Number of threads to use for crawling videos from channels (for updating subscriptions)
+ property feed_threads : Int32 = 1 # Number of threads to use for updating feeds
+ property output : String = "STDOUT" # Log file path or STDOUT
+ property log_level : LogLevel = LogLevel::Info # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
property db : DBConfig # Database configuration
- property full_refresh : Bool # Used for crawling channels: threads should check all videos uploaded by a channel
+ property decrypt_polling : Bool = true # Use polling to keep decryption function up to date
+ property full_refresh : Bool = false # Used for crawling channels: threads should check all videos uploaded by a channel
property https_only : Bool? # Used to tell Invidious it is behind a proxy, so links to resources should be https://
property hmac_key : String? # HMAC signing key for CSRF tokens and verifying pubsub subscriptions
property domain : String? # Domain to be used for links to resources on the site where an absolute URL is required
@@ -92,7 +95,6 @@ class Config
property port : Int32 = 3000 # Port to listen for connections (overrided by command line argument)
property host_binding : String = "0.0.0.0" # Host to bind (overrided by command line argument)
property pool_size : Int32 = 100 # Pool size for HTTP requests to youtube.com and ytimg.com (each domain has a separate pool of `pool_size`)
- property admin_email : String = "omarroth@protonmail.com" # Email for bug reports
@[YAML::Field(converter: Preferences::StringToCookies)]
property cookies : HTTP::Cookies = HTTP::Cookies.new # Saved cookies in "name1=value1; name2=value2..." format
@@ -333,11 +335,11 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
items
end
-def check_enum(db, logger, enum_name, struct_type = nil)
+def check_enum(db, enum_name, struct_type = nil)
return # TODO
if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
- logger.info("check_enum: CREATE TYPE #{enum_name}")
+ LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
db.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -345,12 +347,12 @@ def check_enum(db, logger, enum_name, struct_type = nil)
end
end
-def check_table(db, logger, table_name, struct_type = nil)
+def check_table(db, table_name, struct_type = nil)
# Create table if it doesn't exist
begin
db.exec("SELECT * FROM #{table_name} LIMIT 0")
rescue ex
- logger.info("check_table: check_table: CREATE TABLE #{table_name}")
+ LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
db.using_connection do |conn|
conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -370,7 +372,7 @@ def check_table(db, logger, table_name, struct_type = nil)
if name != column_array[i]?
if !column_array[i]?
new_column = column_types.select { |line| line.starts_with? name }[0]
- logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
next
end
@@ -388,29 +390,29 @@ def check_table(db, logger, table_name, struct_type = nil)
# There's a column we didn't expect
if !new_column
- logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
column_array = get_column_array(db, table_name)
next
end
- logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
- logger.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+ LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
- logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
- logger.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
column_array = get_column_array(db, table_name)
end
else
- logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
end
end
@@ -420,7 +422,7 @@ def check_table(db, logger, table_name, struct_type = nil)
column_array.each do |column|
if !struct_array.includes? column
- logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+ LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
end
end
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index 4e4d7306..7c5b0247 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,14 +1,14 @@
require "logger"
enum LogLevel
- All
- Trace
- Debug
- Info
- Warn
- Error
- Fatal
- Off
+ All = 0
+ Trace = 1
+ Debug = 2
+ Info = 3
+ Warn = 4
+ Error = 5
+ Fatal = 6
+ Off = 7
end
class Invidious::LogHandler < Kemal::BaseLogHandler
diff --git a/src/invidious/helpers/proxy.cr b/src/invidious/helpers/proxy.cr
index 4f415ba0..7a42ef41 100644
--- a/src/invidious/helpers/proxy.cr
+++ b/src/invidious/helpers/proxy.cr
@@ -108,7 +108,9 @@ def filter_proxies(proxies)
proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
client.set_proxy(proxy)
- client.head("/").status_code == 200
+ status_ok = client.head("/").status_code == 200
+ client.close
+ status_ok
rescue ex
false
end
@@ -132,6 +134,7 @@ def get_nova_proxies(country_code = "US")
headers["Referer"] = "https://www.proxynova.com/proxy-server-list/country-#{country_code}/"
response = client.get("/proxy-server-list/country-#{country_code}/", headers)
+ client.close
document = XML.parse_html(response.body)
proxies = [] of {ip: String, port: Int32, score: Float64}
@@ -177,6 +180,7 @@ def get_spys_proxies(country_code = "US")
}
response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
+ client.close
20.times do
if response.status_code == 200
break
diff --git a/src/invidious/helpers/signatures.cr b/src/invidious/helpers/signatures.cr
index f811500f..d8b1de65 100644
--- a/src/invidious/helpers/signatures.cr
+++ b/src/invidious/helpers/signatures.cr
@@ -1,53 +1,73 @@
alias SigProc = Proc(Array(String), Int32, Array(String))
-def fetch_decrypt_function(id = "CvFH_6DNRCY")
- document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
- url = document.match(/src="(?<url>\/s\/player\/[^\/]+\/player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
- player = YT_POOL.client &.get(url).body
-
- function_name = player.match(/^(?<name>[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]
- function_body = player.match(/^#{Regex.escape(function_name)}=function\(\w\){(?<body>[^}]+)}/m).not_nil!["body"]
- function_body = function_body.split(";")[1..-2]
-
- var_name = function_body[0][0, 2]
- var_body = player.delete("\n").match(/var #{Regex.escape(var_name)}={(?<body>(.*?))};/).not_nil!["body"]
-
- operations = {} of String => SigProc
- var_body.split("},").each do |operation|
- op_name = operation.match(/^[^:]+/).not_nil![0]
- op_body = operation.match(/\{[^}]+/).not_nil![0]
-
- case op_body
- when "{a.reverse()"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.reverse }
- when "{a.splice(0,b)"
- operations[op_name] = ->(a : Array(String), b : Int32) { a.delete_at(0..(b - 1)); a }
- else
- operations[op_name] = ->(a : Array(String), b : Int32) { c = a[0]; a[0] = a[b % a.size]; a[b % a.size] = c; a }
- end
+struct DecryptFunction
+ @decrypt_function = [] of {SigProc, Int32}
+ @decrypt_time = Time.monotonic
+
+ def initialize(@use_polling = true)
end
- decrypt_function = [] of {SigProc, Int32}
- function_body.each do |function|
- function = function.lchop(var_name).delete("[].")
+ def update_decrypt_function
+ @decrypt_function = fetch_decrypt_function
+ end
- op_name = function.match(/[^\(]+/).not_nil![0]
- value = function.match(/\(\w,(?<value>[\d]+)\)/).not_nil!["value"].to_i
+ private def fetch_decrypt_function(id = "CvFH_6DNRCY")
+ document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
+ url = document.match(/src="(?<url>\/s\/player\/[^\/]+\/player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
+ player = YT_POOL.client &.get(url).body
- decrypt_function << {operations[op_name], value}
- end
+ function_name = player.match(/^(?<name>[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]
+ function_body = player.match(/^#{Regex.escape(function_name)}=function\(\w\){(?<body>[^}]+)}/m).not_nil!["body"]
+ function_body = function_body.split(";")[1..-2]
- return decrypt_function
-end
+ var_name = function_body[0][0, 2]
+ var_body = player.delete("\n").match(/var #{Regex.escape(var_name)}={(?<body>(.*?))};/).not_nil!["body"]
+
+ operations = {} of String => SigProc
+ var_body.split("},").each do |operation|
+ op_name = operation.match(/^[^:]+/).not_nil![0]
+ op_body = operation.match(/\{[^}]+/).not_nil![0]
+
+ case op_body
+ when "{a.reverse()"
+ operations[op_name] = ->(a : Array(String), b : Int32) { a.reverse }
+ when "{a.splice(0,b)"
+ operations[op_name] = ->(a : Array(String), b : Int32) { a.delete_at(0..(b - 1)); a }
+ else
+ operations[op_name] = ->(a : Array(String), b : Int32) { c = a[0]; a[0] = a[b % a.size]; a[b % a.size] = c; a }
+ end
+ end
+
+ decrypt_function = [] of {SigProc, Int32}
+ function_body.each do |function|
+ function = function.lchop(var_name).delete("[].")
-def decrypt_signature(fmt : Hash(String, JSON::Any))
- return "" if !fmt["s"]? || !fmt["sp"]?
+ op_name = function.match(/[^\(]+/).not_nil![0]
+ value = function.match(/\(\w,(?<value>[\d]+)\)/).not_nil!["value"].to_i
- sp = fmt["sp"].as_s
- sig = fmt["s"].as_s.split("")
- DECRYPT_FUNCTION.each do |proc, value|
- sig = proc.call(sig, value)
+ decrypt_function << {operations[op_name], value}
+ end
+
+ return decrypt_function
end
- return "&#{sp}=#{sig.join("")}"
+ def decrypt_signature(fmt : Hash(String, JSON::Any))
+ return "" if !fmt["s"]? || !fmt["sp"]?
+
+ sp = fmt["sp"].as_s
+ sig = fmt["s"].as_s.split("")
+ if !@use_polling
+ now = Time.monotonic
+ if now - @decrypt_time > 60.seconds || @decrypt_function.size == 0
+ @decrypt_function = fetch_decrypt_function
+ @decrypt_time = Time.monotonic
+ end
+ end
+
+ @decrypt_function.each do |proc, value|
+ sig = proc.call(sig, value)
+ end
+
+ return "&#{sp}=#{sig.join("")}"
+ end
end
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index bb9a35ea..f068b5f2 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -101,6 +101,15 @@ def make_client(url : URI, region = nil)
return client
end
+def make_client(url : URI, region = nil, &block)
+ client = make_client(url, region)
+ begin
+ yield client
+ ensure
+ client.close
+ end
+end
+
def decode_length_seconds(string)
length_seconds = string.gsub(/[^0-9:]/, "").split(":").map &.to_i
length_seconds = [0] * (3 - length_seconds.size) + length_seconds
@@ -361,7 +370,7 @@ def subscribe_pubsub(topic, key, config)
"hub.secret" => key.to_s,
}
- return make_client(PUBSUB_URL).post("/subscribe", form: body)
+ return make_client(PUBSUB_URL, &.post("/subscribe", form: body))
end
def parse_range(range)
diff --git a/src/invidious/jobs/bypass_captcha_job.cr b/src/invidious/jobs/bypass_captcha_job.cr
index daba64d5..22c54036 100644
--- a/src/invidious/jobs/bypass_captcha_job.cr
+++ b/src/invidious/jobs/bypass_captcha_job.cr
@@ -1,8 +1,7 @@
class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
- private getter logger : Invidious::LogHandler
private getter config : Config
- def initialize(@logger, @config)
+ def initialize(@config)
end
def begin
@@ -91,6 +90,8 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
},
}.to_json).body)
+ captcha_client.close
+
raise response["error"].as_s if response["error"]?
task_id = response["taskId"].as_i
@@ -125,7 +126,7 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
end
end
rescue ex
- logger.error("BypassCaptchaJob: #{ex.message}")
+ LOGGER.error("BypassCaptchaJob: #{ex.message}")
ensure
sleep 1.minute
Fiber.yield
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index bbf55ff3..3e94a56e 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -1,45 +1,49 @@
class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
private getter db : DB::Database
- private getter logger : Invidious::LogHandler
private getter config : Config
- def initialize(@db, @logger, @config)
+ def initialize(@db, @config)
end
def begin
- max_threads = config.channel_threads
- lim_threads = max_threads
- active_threads = 0
+ max_fibers = config.channel_threads
+ lim_fibers = max_fibers
+ active_fibers = 0
active_channel = Channel(Bool).new
backoff = 1.seconds
loop do
- logger.debug("RefreshChannelsJob: Refreshing all channels")
+ LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
db.query("SELECT id FROM channels ORDER BY updated") do |rs|
rs.each do
id = rs.read(String)
- if active_threads >= lim_threads
+ if active_fibers >= lim_fibers
+ LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
if active_channel.receive
- active_threads -= 1
+ LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing")
+ active_fibers -= 1
end
end
- active_threads += 1
+ LOGGER.debug("RefreshChannelsJob: #{id} : Spawning fiber")
+ active_fibers += 1
spawn do
begin
- logger.trace("RefreshChannelsJob: Fetching channel #{id}")
+ LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
channel = fetch_channel(id, db, config.full_refresh)
- lim_threads = max_threads
+ lim_fibers = max_fibers
+
+ LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
rescue ex
- logger.error("RefreshChannelsJob: #{id} : #{ex.message}")
+ LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
if ex.message == "Deleted or invalid channel"
db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
else
- lim_threads = 1
- logger.error("RefreshChannelsJob: #{id} : backing off for #{backoff}s")
+ lim_fibers = 1
+ LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
sleep backoff
if backoff < 1.days
backoff += backoff
@@ -47,13 +51,15 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
backoff = 1.days
end
end
+ ensure
+ LOGGER.debug("RefreshChannelsJob: #{id} fiber : Done")
+ active_channel.send(true)
end
-
- active_channel.send(true)
end
end
end
+ LOGGER.debug("RefreshChannelsJob: Done, sleeping for one minute")
sleep 1.minute
Fiber.yield
end
diff --git a/src/invidious/jobs/refresh_feeds_job.cr b/src/invidious/jobs/refresh_feeds_job.cr
index 5dd47639..7b4ccdea 100644
--- a/src/invidious/jobs/refresh_feeds_job.cr
+++ b/src/invidious/jobs/refresh_feeds_job.cr
@@ -1,14 +1,13 @@
class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
private getter db : DB::Database
- private getter logger : Invidious::LogHandler
private getter config : Config
- def initialize(@db, @logger, @config)
+ def initialize(@db, @config)
end
def begin
- max_threads = config.feed_threads
- active_threads = 0
+ max_fibers = config.feed_threads
+ active_fibers = 0
active_channel = Channel(Bool).new
loop do
@@ -17,27 +16,27 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
email = rs.read(String)
view_name = "subscriptions_#{sha256(email)}"
- if active_threads >= max_threads
+ if active_fibers >= max_fibers
if active_channel.receive
- active_threads -= 1
+ active_fibers -= 1
end
end
- active_threads += 1
+ active_fibers += 1
spawn do
begin
# Drop outdated views
column_array = get_column_array(db, view_name)
ChannelVideo.type_array.each_with_index do |name, i|
if name != column_array[i]?
- logger.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
+ LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
db.exec("DROP MATERIALIZED VIEW #{view_name}")
raise "view does not exist"
end
end
if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
- logger.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
+ LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
db.exec("DROP MATERIALIZED VIEW #{view_name}")
end
@@ -49,18 +48,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
- logger.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
+ LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
rescue ex
begin
# While iterating through, we may have an email stored from a deleted account
if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
- logger.info("RefreshFeedsJob: CREATE #{view_name}")
+ LOGGER.info("RefreshFeedsJob: CREATE #{view_name}")
db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
end
rescue ex
- logger.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
+ LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
end
end
end
diff --git a/src/invidious/jobs/subscribe_to_feeds_job.cr b/src/invidious/jobs/subscribe_to_feeds_job.cr
index 3bb31299..750aceb8 100644
--- a/src/invidious/jobs/subscribe_to_feeds_job.cr
+++ b/src/invidious/jobs/subscribe_to_feeds_job.cr
@@ -1,19 +1,18 @@
class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
private getter db : DB::Database
- private getter logger : Invidious::LogHandler
private getter hmac_key : String
private getter config : Config
- def initialize(@db, @logger, @config, @hmac_key)
+ def initialize(@db, @config, @hmac_key)
end
def begin
- max_threads = 1
+ max_fibers = 1
if config.use_pubsub_feeds.is_a?(Int32)
- max_threads = config.use_pubsub_feeds.as(Int32)
+ max_fibers = config.use_pubsub_feeds.as(Int32)
end
- active_threads = 0
+ active_fibers = 0
active_channel = Channel(Bool).new
loop do
@@ -21,23 +20,23 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
rs.each do
ucid = rs.read(String)
- if active_threads >= max_threads.as(Int32)
+ if active_fibers >= max_fibers.as(Int32)
if active_channel.receive
- active_threads -= 1
+ active_fibers -= 1
end
end
- active_threads += 1
+ active_fibers += 1
spawn do
begin
response = subscribe_pubsub(ucid, hmac_key, config)
if response.status_code >= 400
- logger.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
+ LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
end
rescue ex
- logger.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
+ LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
end
active_channel.send(true)
diff --git a/src/invidious/jobs/update_decrypt_function_job.cr b/src/invidious/jobs/update_decrypt_function_job.cr
index 5332c672..6fa0ae1b 100644
--- a/src/invidious/jobs/update_decrypt_function_job.cr
+++ b/src/invidious/jobs/update_decrypt_function_job.cr
@@ -1,15 +1,10 @@
class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
- DECRYPT_FUNCTION = [] of {SigProc, Int32}
-
def begin
loop do
begin
- decrypt_function = fetch_decrypt_function
- DECRYPT_FUNCTION.clear
- decrypt_function.each { |df| DECRYPT_FUNCTION << df }
+ DECRYPT_FUNCTION.update_decrypt_function
rescue ex
- # TODO: Log error
- next
+ LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
ensure
sleep 1.minute
Fiber.yield
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index d5b41caa..25797a36 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -365,9 +365,13 @@ def fetch_playlist(plid, locale)
end
initial_data = extract_initial_data(response.body)
- playlist_info = initial_data["sidebar"]?.try &.["playlistSidebarRenderer"]?.try &.["items"]?.try &.[0]["playlistSidebarPrimaryInfoRenderer"]?
+ playlist_sidebar_renderer = initial_data["sidebar"]?.try &.["playlistSidebarRenderer"]?.try &.["items"]?
+ raise InfoException.new("Could not extract playlistSidebarRenderer.") if !playlist_sidebar_renderer
+
+ playlist_info = playlist_sidebar_renderer[0]["playlistSidebarPrimaryInfoRenderer"]?
raise InfoException.new("Could not extract playlist info") if !playlist_info
+
title = playlist_info["title"]?.try &.["runs"][0]?.try &.["text"]?.try &.as_s || ""
desc_item = playlist_info["description"]?
@@ -392,14 +396,18 @@ def fetch_playlist(plid, locale)
end
end
- author_info = initial_data["sidebar"]?.try &.["playlistSidebarRenderer"]?.try &.["items"]?.try &.[1]["playlistSidebarSecondaryInfoRenderer"]?
- .try &.["videoOwner"]["videoOwnerRenderer"]?
-
- raise InfoException.new("Could not extract author info") if !author_info
+ if playlist_sidebar_renderer.size < 2
+ author = ""
+ author_thumbnail = ""
+ ucid = ""
+ else
+ author_info = playlist_sidebar_renderer[1]["playlistSidebarSecondaryInfoRenderer"]?.try &.["videoOwner"]["videoOwnerRenderer"]?
+ raise InfoException.new("Could not extract author info") if !author_info
- author_thumbnail = author_info["thumbnail"]["thumbnails"][0]["url"]?.try &.as_s || ""
- author = author_info["title"]["runs"][0]["text"]?.try &.as_s || ""
- ucid = author_info["title"]["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"]?.try &.as_s || ""
+ author = author_info["title"]["runs"][0]["text"]?.try &.as_s || ""
+ author_thumbnail = author_info["thumbnail"]["thumbnails"][0]["url"]?.try &.as_s || ""
+ ucid = author_info["title"]["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"]?.try &.as_s || ""
+ end
return Playlist.new({
title: title,
diff --git a/src/invidious/routes/base_route.cr b/src/invidious/routes/base_route.cr
index 2852cb04..37624267 100644
--- a/src/invidious/routes/base_route.cr
+++ b/src/invidious/routes/base_route.cr
@@ -1,7 +1,6 @@
abstract class Invidious::Routes::BaseRoute
private getter config : Config
- private getter logger : Invidious::LogHandler
- def initialize(@config, @logger)
+ def initialize(@config)
end
end
diff --git a/src/invidious/routes/watch.cr b/src/invidious/routes/watch.cr
index a5c05c00..65604a88 100644
--- a/src/invidious/routes/watch.cr
+++ b/src/invidious/routes/watch.cr
@@ -62,7 +62,7 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
rescue ex : VideoRedirect
return env.redirect env.request.resource.gsub(id, ex.video_id)
rescue ex
- logger.error("get_video: #{id} : #{ex.message}")
+ LOGGER.error("get_video: #{id} : #{ex.message}")
return error_template(500, ex)
end
diff --git a/src/invidious/routing.cr b/src/invidious/routing.cr
index 602e6ae5..593c7372 100644
--- a/src/invidious/routing.cr
+++ b/src/invidious/routing.cr
@@ -1,14 +1,14 @@
module Invidious::Routing
macro get(path, controller, method = :handle)
get {{ path }} do |env|
- controller_instance = {{ controller }}.new(config, logger)
+ controller_instance = {{ controller }}.new(config)
controller_instance.{{ method.id }}(env)
end
end
macro post(path, controller, method = :handle)
post {{ path }} do |env|
- controller_instance = {{ controller }}.new(config, logger)
+ controller_instance = {{ controller }}.new(config)
controller_instance.{{ method.id }}(env)
end
end
diff --git a/src/invidious/users.cr b/src/invidious/users.cr
index 6a3ca5c1..153e3b6a 100644
--- a/src/invidious/users.cr
+++ b/src/invidious/users.cr
@@ -427,7 +427,7 @@ def generate_captcha(key, db)
end
def generate_text_captcha(key, db)
- response = make_client(TEXTCAPTCHA_URL).get("/omarroth@protonmail.com.json").body
+ response = make_client(TEXTCAPTCHA_URL, &.get("/github.com/iv.org/invidious.json").body)
response = JSON.parse(response)
tokens = response["a"].as_a.map do |answer|
diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr
index 4a831110..74edc156 100644
--- a/src/invidious/videos.cr
+++ b/src/invidious/videos.cr
@@ -580,7 +580,7 @@ struct Video
s.each do |k, v|
fmt[k] = JSON::Any.new(v)
end
- fmt["url"] = JSON::Any.new("#{fmt["url"]}#{decrypt_signature(fmt)}")
+ fmt["url"] = JSON::Any.new("#{fmt["url"]}#{DECRYPT_FUNCTION.decrypt_signature(fmt)}")
end
fmt["url"] = JSON::Any.new("#{fmt["url"]}&host=#{URI.parse(fmt["url"].as_s).host}")
@@ -599,7 +599,7 @@ struct Video
s.each do |k, v|
fmt[k] = JSON::Any.new(v)
end
- fmt["url"] = JSON::Any.new("#{fmt["url"]}#{decrypt_signature(fmt)}")
+ fmt["url"] = JSON::Any.new("#{fmt["url"]}#{DECRYPT_FUNCTION.decrypt_signature(fmt)}")
end
fmt["url"] = JSON::Any.new("#{fmt["url"]}&host=#{URI.parse(fmt["url"].as_s).host}")
diff --git a/src/invidious/views/components/item.ecr b/src/invidious/views/components/item.ecr
index 0c19fc1b..e4a60697 100644
--- a/src/invidious/views/components/item.ecr
+++ b/src/invidious/views/components/item.ecr
@@ -137,10 +137,13 @@
</a>
<% end %>
<p><a href="/watch?v=<%= item.id %>"><%= HTML.escape(item.title) %></a></p>
- <p>
- <b>
+ <p style="display: flex;">
+ <b style="flex: 1;">
<a style="width:100%" href="/channel/<%= item.ucid %>"><%= item.author %></a>
</b>
+ <a title="Audio mode" href="/watch?v=<%= item.id %>&amp;listen=1">
+ <i class="icon ion-md-headset"></i>
+ </a>
</p>
<h5 class="pure-g">