Diffstat (limited to 'src')
-rw-r--r--  src/invidious.cr | 1
-rw-r--r--  src/invidious/channels/channels.cr | 36
-rw-r--r--  src/invidious/channels/community.cr | 82
-rw-r--r--  src/invidious/channels/videos.cr | 31
-rw-r--r--  src/invidious/comments.cr | 762
-rw-r--r--  src/invidious/comments/content.cr | 89
-rw-r--r--  src/invidious/comments/links_util.cr | 76
-rw-r--r--  src/invidious/comments/reddit.cr | 41
-rw-r--r--  src/invidious/comments/reddit_types.cr | 57
-rw-r--r--  src/invidious/comments/youtube.cr | 250
-rw-r--r--  src/invidious/database/users.cr | 2
-rw-r--r--  src/invidious/frontend/comments_reddit.cr | 50
-rw-r--r--  src/invidious/frontend/comments_youtube.cr | 160
-rw-r--r--  src/invidious/hashtag.cr | 23
-rw-r--r--  src/invidious/helpers/serialized_yt_data.cr | 1
-rw-r--r--  src/invidious/helpers/utils.cr | 78
-rw-r--r--  src/invidious/mixes.cr | 2
-rw-r--r--  src/invidious/playlists.cr | 2
-rw-r--r--  src/invidious/routes/api/v1/search.cr | 28
-rw-r--r--  src/invidious/routes/api/v1/videos.cr | 10
-rw-r--r--  src/invidious/routes/channels.cr | 1
-rw-r--r--  src/invidious/routes/playlists.cr | 6
-rw-r--r--  src/invidious/routes/preferences.cr | 9
-rw-r--r--  src/invidious/routes/search.cr | 6
-rw-r--r--  src/invidious/routes/watch.cr | 28
-rw-r--r--  src/invidious/routing.cr | 1
-rw-r--r--  src/invidious/search/processors.cr | 4
-rw-r--r--  src/invidious/search/query.cr | 23
-rw-r--r--  src/invidious/trending.cr | 21
-rw-r--r--  src/invidious/user/imports.cr | 71
-rw-r--r--  src/invidious/videos/description.cr | 64
-rw-r--r--  src/invidious/videos/parser.cr | 6
-rw-r--r--  src/invidious/views/community.ecr | 2
-rw-r--r--  src/invidious/views/components/channel_info.ecr | 4
-rw-r--r--  src/invidious/views/components/item.ecr | 10
-rw-r--r--  src/invidious/views/feeds/history.ecr | 2
-rw-r--r--  src/invidious/views/feeds/playlists.ecr | 15
-rw-r--r--  src/invidious/views/user/data_control.ecr | 7
-rw-r--r--  src/invidious/views/watch.ecr | 4
-rw-r--r--  src/invidious/yt_backend/extractors.cr | 61
-rw-r--r--  src/invidious/yt_backend/extractors_utils.cr | 20
41 files changed, 1153 insertions, 993 deletions
diff --git a/src/invidious.cr b/src/invidious.cr
index d4f8e0fb..b5abd5c7 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -43,6 +43,7 @@ require "./invidious/videos/*"
require "./invidious/jsonify/**"
require "./invidious/*"
+require "./invidious/comments/*"
require "./invidious/channels/*"
require "./invidious/user/*"
require "./invidious/search/*"
diff --git a/src/invidious/channels/channels.cr b/src/invidious/channels/channels.cr
index 63dd2194..c3d6124f 100644
--- a/src/invidious/channels/channels.cr
+++ b/src/invidious/channels/channels.cr
@@ -159,12 +159,18 @@ def fetch_channel(ucid, pull_all_videos : Bool)
LOGGER.debug("fetch_channel: #{ucid}")
LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}")
+ namespaces = {
+ "yt" => "http://www.youtube.com/xml/schemas/2015",
+ "media" => "http://search.yahoo.com/mrss/",
+ "default" => "http://www.w3.org/2005/Atom",
+ }
+
LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
- rss = XML.parse_html(rss)
+ rss = XML.parse(rss)
- author = rss.xpath_node(%q(//feed/title))
+ author = rss.xpath_node("//default:feed/default:title", namespaces)
if !author
raise InfoException.new("Deleted or invalid channel")
end
@@ -192,15 +198,23 @@ def fetch_channel(ucid, pull_all_videos : Bool)
videos, continuation = IV::Channel::Tabs.get_videos(channel)
LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
- rss.xpath_nodes("//feed/entry").each do |entry|
- video_id = entry.xpath_node("videoid").not_nil!.content
- title = entry.xpath_node("title").not_nil!.content
- published = Time.parse_rfc3339(entry.xpath_node("published").not_nil!.content)
- updated = Time.parse_rfc3339(entry.xpath_node("updated").not_nil!.content)
- author = entry.xpath_node("author/name").not_nil!.content
- ucid = entry.xpath_node("channelid").not_nil!.content
- views = entry.xpath_node("group/community/statistics").try &.["views"]?.try &.to_i64?
- views ||= 0_i64
+ rss.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry|
+ video_id = entry.xpath_node("yt:videoId", namespaces).not_nil!.content
+ title = entry.xpath_node("default:title", namespaces).not_nil!.content
+
+ published = Time.parse_rfc3339(
+ entry.xpath_node("default:published", namespaces).not_nil!.content
+ )
+ updated = Time.parse_rfc3339(
+ entry.xpath_node("default:updated", namespaces).not_nil!.content
+ )
+
+ author = entry.xpath_node("default:author/default:name", namespaces).not_nil!.content
+ ucid = entry.xpath_node("yt:channelId", namespaces).not_nil!.content
+
+ views = entry
+ .xpath_node("media:group/media:community/media:statistics", namespaces)
+ .try &.["views"]?.try &.to_i64? || 0_i64
channel_video = videos
.select(SearchVideo)
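
A minimal sketch, not part of the commit, of how the namespaced XPath lookups above behave with Crystal's XML module; the feed below is a trimmed, hand-written stand-in for a real channel RSS document:

    require "xml"

    feed = <<-XML
      <feed xmlns:yt="http://www.youtube.com/xml/schemas/2015"
            xmlns:media="http://search.yahoo.com/mrss/"
            xmlns="http://www.w3.org/2005/Atom">
        <title>Example channel</title>
        <entry>
          <yt:videoId>dQw4w9WgXcQ</yt:videoId>
          <title>Example video</title>
        </entry>
      </feed>
      XML

    namespaces = {
      "yt"      => "http://www.youtube.com/xml/schemas/2015",
      "media"   => "http://search.yahoo.com/mrss/",
      "default" => "http://www.w3.org/2005/Atom",
    }

    # XML.parse keeps namespaces, so Atom elements must be addressed through
    # the "default" prefix; XML.parse_html discarded them, which is why the
    # old, prefix-less XPath queries used to work.
    doc = XML.parse(feed)
    doc.xpath_nodes("//default:feed/default:entry", namespaces).each do |entry|
      puts entry.xpath_node("yt:videoId", namespaces).try &.content
    end
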
diff --git a/src/invidious/channels/community.cr b/src/invidious/channels/community.cr
index ce34ff82..aac4bc8a 100644
--- a/src/invidious/channels/community.cr
+++ b/src/invidious/channels/community.cr
@@ -31,18 +31,16 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
session_token: session_token,
}
- response = YT_POOL.client &.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, form: post_req)
- body = JSON.parse(response.body)
+ body = YoutubeAPI.browse(continuation)
- body = body["response"]["continuationContents"]["itemSectionContinuation"]? ||
- body["response"]["continuationContents"]["backstageCommentsContinuation"]?
+ body = body.dig?("continuationContents", "itemSectionContinuation") ||
+ body.dig?("continuationContents", "backstageCommentsContinuation")
if !body
raise InfoException.new("Could not extract continuation.")
end
end
- continuation = body["continuations"]?.try &.[0]["nextContinuationData"]["continuation"].as_s
posts = body["contents"].as_a
if message = posts[0]["messageRenderer"]?
@@ -125,49 +123,13 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
if attachment = post["backstageAttachment"]?
json.field "attachment" do
- json.object do
- case attachment.as_h
- when .has_key?("videoRenderer")
- attachment = attachment["videoRenderer"]
- json.field "type", "video"
-
- if !attachment["videoId"]?
- error_message = (attachment["title"]["simpleText"]? ||
- attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?)
-
- json.field "error", error_message
- else
- video_id = attachment["videoId"].as_s
-
- video_title = attachment["title"]["simpleText"]? || attachment["title"]["runs"]?.try &.[0]?.try &.["text"]?
- json.field "title", video_title
- json.field "videoId", video_id
- json.field "videoThumbnails" do
- Invidious::JSONify::APIv1.thumbnails(json, video_id)
- end
-
- json.field "lengthSeconds", decode_length_seconds(attachment["lengthText"]["simpleText"].as_s)
-
- author_info = attachment["ownerText"]["runs"][0].as_h
-
- json.field "author", author_info["text"].as_s
- json.field "authorId", author_info["navigationEndpoint"]["browseEndpoint"]["browseId"]
- json.field "authorUrl", author_info["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
-
- # TODO: json.field "authorThumbnails", "channelThumbnailSupportedRenderers"
- # TODO: json.field "authorVerified", "ownerBadges"
-
- published = decode_date(attachment["publishedTimeText"]["simpleText"].as_s)
-
- json.field "published", published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
-
- view_count = attachment["viewCountText"]?.try &.["simpleText"].as_s.gsub(/\D/, "").to_i64? || 0_i64
-
- json.field "viewCount", view_count
- json.field "viewCountText", translate_count(locale, "generic_views_count", view_count, NumberFormatting::Short)
- end
- when .has_key?("backstageImageRenderer")
+ case attachment.as_h
+ when .has_key?("videoRenderer")
+ parse_item(attachment)
+ .as(SearchVideo)
+ .to_json(locale, json)
+ when .has_key?("backstageImageRenderer")
+ json.object do
attachment = attachment["backstageImageRenderer"]
json.field "type", "image"
@@ -188,7 +150,9 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
end
end
end
- when .has_key?("pollRenderer")
+ end
+ when .has_key?("pollRenderer")
+ json.object do
attachment = attachment["pollRenderer"]
json.field "type", "poll"
json.field "totalVotes", short_text_to_number(attachment["totalVotes"]["simpleText"].as_s.split(" ")[0])
@@ -221,7 +185,9 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
end
end
end
- when .has_key?("postMultiImageRenderer")
+ end
+ when .has_key?("postMultiImageRenderer")
+ json.object do
attachment = attachment["postMultiImageRenderer"]
json.field "type", "multiImage"
json.field "images" do
@@ -245,7 +211,13 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
end
end
end
- else
+ end
+ when .has_key?("playlistRenderer")
+ parse_item(attachment)
+ .as(SearchPlaylist)
+ .to_json(locale, json)
+ else
+ json.object do
json.field "type", "unknown"
json.field "error", "Unrecognized attachment type."
end
@@ -270,17 +242,15 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
end
end
end
-
- if body["continuations"]?
- continuation = body["continuations"][0]["nextContinuationData"]["continuation"].as_s
- json.field "continuation", extract_channel_community_cursor(continuation)
+ if cont = posts.dig?(-1, "continuationItemRenderer", "continuationEndpoint", "continuationCommand", "token")
+ json.field "continuation", extract_channel_community_cursor(cont.as_s)
end
end
end
if format == "html"
response = JSON.parse(response)
- content_html = template_youtube_comments(response, locale, thin_mode)
+ content_html = IV::Frontend::Comments.template_youtube(response, locale, thin_mode)
response = JSON.build do |json|
json.object do
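
A small sketch, separate from the commit, of the dig?-based continuation lookup used above; the JSON payload is a hand-written stand-in for the "contents" array of a community browse response:

    require "json"

    posts = JSON.parse(<<-JSON)
      [
        {"backstagePostThreadRenderer": {}},
        {"continuationItemRenderer":
          {"continuationEndpoint": {"continuationCommand": {"token": "EXAMPLE_TOKEN"}}}}
      ]
      JSON

    # dig? returns nil instead of raising when any key (or the -1 index) is
    # missing, so the "continuation" field is simply omitted on the last page.
    if cont = posts.dig?(-1, "continuationItemRenderer", "continuationEndpoint",
         "continuationCommand", "token")
      puts cont.as_s
    end
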
diff --git a/src/invidious/channels/videos.cr b/src/invidious/channels/videos.cr
index 3d53f2ab..12ed4a7d 100644
--- a/src/invidious/channels/videos.cr
+++ b/src/invidious/channels/videos.cr
@@ -129,38 +129,15 @@ module Invidious::Channel::Tabs
# Shorts
# -------------------
- private def fetch_shorts_data(ucid : String, continuation : String? = nil)
+ def get_shorts(channel : AboutChannel, continuation : String? = nil)
if continuation.nil?
# EgZzaG9ydHPyBgUKA5oBAA%3D%3D is the protobuf object to load "shorts"
# TODO: try to extract the continuation tokens that allows other sorting options
- return YoutubeAPI.browse(ucid, params: "EgZzaG9ydHPyBgUKA5oBAA%3D%3D")
+ initial_data = YoutubeAPI.browse(channel.ucid, params: "EgZzaG9ydHPyBgUKA5oBAA%3D%3D")
else
- return YoutubeAPI.browse(continuation: continuation)
- end
- end
-
- def get_shorts(channel : AboutChannel, continuation : String? = nil)
- initial_data = self.fetch_shorts_data(channel.ucid, continuation)
-
- begin
- # Try to parse the initial data fetched above
- return extract_items(initial_data, channel.author, channel.ucid)
- rescue ex : RetryOnceException
- # Sometimes, for a completely unknown reason, the "reelItemRenderer"
- # object is missing some critical information (it happens once in about
- # 20 subsequent requests). Refreshing the page is required to properly
- # show the "shorts" tab.
- #
- # In order to make the experience smoother for the user, we simulate
- # said page refresh by fetching again the JSON. If that still doesn't
- # work, we raise a BrokenTubeException, as something is really broken.
- begin
- initial_data = self.fetch_shorts_data(channel.ucid, continuation)
- return extract_items(initial_data, channel.author, channel.ucid)
- rescue ex : RetryOnceException
- raise BrokenTubeException.new "reelPlayerHeaderSupportedRenderers"
- end
+ initial_data = YoutubeAPI.browse(continuation: continuation)
end
+ return extract_items(initial_data, channel.author, channel.ucid)
end
# -------------------
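
A hypothetical wrapper, not part of the commit, around the simplified shorts pagination above; it assumes, as with get_videos in channels.cr, that extract_items hands back an items/continuation pair:

    # Collect every shorts page for a channel by following continuations.
    # `AboutChannel` comes from the existing channel code; nothing here is
    # added by this commit.
    def all_shorts(channel : AboutChannel)
      items, continuation = Invidious::Channel::Tabs.get_shorts(channel)

      until continuation.nil?
        page, continuation = Invidious::Channel::Tabs.get_shorts(channel, continuation)
        items += page
      end

      items
    end
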
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
deleted file mode 100644
index b15d63d4..00000000
--- a/src/invidious/comments.cr
+++ /dev/null
@@ -1,762 +0,0 @@
-class RedditThing
- include JSON::Serializable
-
- property kind : String
- property data : RedditComment | RedditLink | RedditMore | RedditListing
-end
-
-class RedditComment
- include JSON::Serializable
-
- property author : String
- property body_html : String
- property replies : RedditThing | String
- property score : Int32
- property depth : Int32
- property permalink : String
-
- @[JSON::Field(converter: RedditComment::TimeConverter)]
- property created_utc : Time
-
- module TimeConverter
- def self.from_json(value : JSON::PullParser) : Time
- Time.unix(value.read_float.to_i)
- end
-
- def self.to_json(value : Time, json : JSON::Builder)
- json.number(value.to_unix)
- end
- end
-end
-
-struct RedditLink
- include JSON::Serializable
-
- property author : String
- property score : Int32
- property subreddit : String
- property num_comments : Int32
- property id : String
- property permalink : String
- property title : String
-end
-
-struct RedditMore
- include JSON::Serializable
-
- property children : Array(String)
- property count : Int32
- property depth : Int32
-end
-
-class RedditListing
- include JSON::Serializable
-
- property children : Array(RedditThing)
- property modhash : String
-end
-
-def fetch_youtube_comments(id, cursor, format, locale, thin_mode, region, sort_by = "top")
- case cursor
- when nil, ""
- ctoken = produce_comment_continuation(id, cursor: "", sort_by: sort_by)
- when .starts_with? "ADSJ"
- ctoken = produce_comment_continuation(id, cursor: cursor, sort_by: sort_by)
- else
- ctoken = cursor
- end
-
- client_config = YoutubeAPI::ClientConfig.new(region: region)
- response = YoutubeAPI.next(continuation: ctoken, client_config: client_config)
- contents = nil
-
- if on_response_received_endpoints = response["onResponseReceivedEndpoints"]?
- header = nil
- on_response_received_endpoints.as_a.each do |item|
- if item["reloadContinuationItemsCommand"]?
- case item["reloadContinuationItemsCommand"]["slot"]
- when "RELOAD_CONTINUATION_SLOT_HEADER"
- header = item["reloadContinuationItemsCommand"]["continuationItems"][0]
- when "RELOAD_CONTINUATION_SLOT_BODY"
- # continuationItems is nil when video has no comments
- contents = item["reloadContinuationItemsCommand"]["continuationItems"]?
- end
- elsif item["appendContinuationItemsAction"]?
- contents = item["appendContinuationItemsAction"]["continuationItems"]
- end
- end
- elsif response["continuationContents"]?
- response = response["continuationContents"]
- if response["commentRepliesContinuation"]?
- body = response["commentRepliesContinuation"]
- else
- body = response["itemSectionContinuation"]
- end
- contents = body["contents"]?
- header = body["header"]?
- else
- raise NotFoundException.new("Comments not found.")
- end
-
- if !contents
- if format == "json"
- return {"comments" => [] of String}.to_json
- else
- return {"contentHtml" => "", "commentCount" => 0}.to_json
- end
- end
-
- continuation_item_renderer = nil
- contents.as_a.reject! do |item|
- if item["continuationItemRenderer"]?
- continuation_item_renderer = item["continuationItemRenderer"]
- true
- end
- end
-
- response = JSON.build do |json|
- json.object do
- if header
- count_text = header["commentsHeaderRenderer"]["countText"]
- comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
- .try &.as_s.gsub(/\D/, "").to_i? || 0
- json.field "commentCount", comment_count
- end
-
- json.field "videoId", id
-
- json.field "comments" do
- json.array do
- contents.as_a.each do |node|
- json.object do
- if node["commentThreadRenderer"]?
- node = node["commentThreadRenderer"]
- end
-
- if node["replies"]?
- node_replies = node["replies"]["commentRepliesRenderer"]
- end
-
- if node["comment"]?
- node_comment = node["comment"]["commentRenderer"]
- else
- node_comment = node["commentRenderer"]
- end
-
- content_html = node_comment["contentText"]?.try { |t| parse_content(t, id) } || ""
- author = node_comment["authorText"]?.try &.["simpleText"]? || ""
-
- json.field "verified", (node_comment["authorCommentBadge"]? != nil)
-
- json.field "author", author
- json.field "authorThumbnails" do
- json.array do
- node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
- json.object do
- json.field "url", thumbnail["url"]
- json.field "width", thumbnail["width"]
- json.field "height", thumbnail["height"]
- end
- end
- end
- end
-
- if node_comment["authorEndpoint"]?
- json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
- json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
- else
- json.field "authorId", ""
- json.field "authorUrl", ""
- end
-
- published_text = node_comment["publishedTimeText"]["runs"][0]["text"].as_s
- published = decode_date(published_text.rchop(" (edited)"))
-
- if published_text.includes?(" (edited)")
- json.field "isEdited", true
- else
- json.field "isEdited", false
- end
-
- json.field "content", html_to_content(content_html)
- json.field "contentHtml", content_html
-
- json.field "isPinned", (node_comment["pinnedCommentBadge"]? != nil)
- json.field "isSponsor", (node_comment["sponsorCommentBadge"]? != nil)
- if node_comment["sponsorCommentBadge"]?
- # Sponsor icon thumbnails always have one object and there's only ever the url property in it
- json.field "sponsorIconUrl", node_comment.dig("sponsorCommentBadge", "sponsorCommentBadgeRenderer", "customBadge", "thumbnails", 0, "url").to_s
- end
- json.field "published", published.to_unix
- json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
-
- comment_action_buttons_renderer = node_comment["actionButtons"]["commentActionButtonsRenderer"]
-
- json.field "likeCount", comment_action_buttons_renderer["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"].as_s.scan(/\d/).map(&.[0]).join.to_i
- json.field "commentId", node_comment["commentId"]
- json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]
-
- if comment_action_buttons_renderer["creatorHeart"]?
- hearth_data = comment_action_buttons_renderer["creatorHeart"]["creatorHeartRenderer"]["creatorThumbnail"]
- json.field "creatorHeart" do
- json.object do
- json.field "creatorThumbnail", hearth_data["thumbnails"][-1]["url"]
- json.field "creatorName", hearth_data["accessibility"]["accessibilityData"]["label"]
- end
- end
- end
-
- if node_replies && !response["commentRepliesContinuation"]?
- if node_replies["continuations"]?
- continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
- elsif node_replies["contents"]?
- continuation = node_replies["contents"]?.try &.as_a[0]["continuationItemRenderer"]["continuationEndpoint"]["continuationCommand"]["token"].as_s
- end
- continuation ||= ""
-
- json.field "replies" do
- json.object do
- json.field "replyCount", node_comment["replyCount"]? || 1
- json.field "continuation", continuation
- end
- end
- end
- end
- end
- end
- end
-
- if continuation_item_renderer
- if continuation_item_renderer["continuationEndpoint"]?
- continuation_endpoint = continuation_item_renderer["continuationEndpoint"]
- elsif continuation_item_renderer["button"]?
- continuation_endpoint = continuation_item_renderer["button"]["buttonRenderer"]["command"]
- end
- if continuation_endpoint
- json.field "continuation", continuation_endpoint["continuationCommand"]["token"].as_s
- end
- end
- end
- end
-
- if format == "html"
- response = JSON.parse(response)
- content_html = template_youtube_comments(response, locale, thin_mode)
-
- response = JSON.build do |json|
- json.object do
- json.field "contentHtml", content_html
-
- if response["commentCount"]?
- json.field "commentCount", response["commentCount"]
- else
- json.field "commentCount", 0
- end
- end
- end
- end
-
- return response
-end
-
-def fetch_reddit_comments(id, sort_by = "confidence")
- client = make_client(REDDIT_URL)
- headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}
-
- # TODO: Use something like #479 for a static list of instances to use here
- query = URI::Params.encode({q: "(url:3D#{id} OR url:#{id}) AND (site:invidio.us OR site:youtube.com OR site:youtu.be)"})
- search_results = client.get("/search.json?#{query}", headers)
-
- if search_results.status_code == 200
- search_results = RedditThing.from_json(search_results.body)
-
- # For videos that have more than one thread, choose the one with the highest score
- threads = search_results.data.as(RedditListing).children
- thread = threads.max_by?(&.data.as(RedditLink).score).try(&.data.as(RedditLink))
- result = thread.try do |t|
- body = client.get("/r/#{t.subreddit}/comments/#{t.id}.json?limit=100&sort=#{sort_by}", headers).body
- Array(RedditThing).from_json(body)
- end
- result ||= [] of RedditThing
- elsif search_results.status_code == 302
- # Previously, if there was only one result then the API would redirect to that result.
- # Now, it appears it will still return a listing so this section is likely unnecessary.
-
- result = client.get(search_results.headers["Location"], headers).body
- result = Array(RedditThing).from_json(result)
-
- thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
- else
- raise NotFoundException.new("Comments not found.")
- end
-
- client.close
-
- comments = result[1]?.try(&.data.as(RedditListing).children)
- comments ||= [] of RedditThing
- return comments, thread
-end
-
-def template_youtube_comments(comments, locale, thin_mode, is_replies = false)
- String.build do |html|
- root = comments["comments"].as_a
- root.each do |child|
- if child["replies"]?
- replies_count_text = translate_count(locale,
- "comments_view_x_replies",
- child["replies"]["replyCount"].as_i64 || 0,
- NumberFormatting::Separator
- )
-
- replies_html = <<-END_HTML
- <div id="replies" class="pure-g">
- <div class="pure-u-1-24"></div>
- <div class="pure-u-23-24">
- <p>
- <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
- data-onclick="get_youtube_replies" data-load-replies>#{replies_count_text}</a>
- </p>
- </div>
- </div>
- END_HTML
- end
-
- if !thin_mode
- author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).request_target}"
- else
- author_thumbnail = ""
- end
-
- author_name = HTML.escape(child["author"].as_s)
- sponsor_icon = ""
- if child["verified"]?.try &.as_bool && child["authorIsChannelOwner"]?.try &.as_bool
- author_name += "&nbsp;<i class=\"icon ion ion-md-checkmark-circle\"></i>"
- elsif child["verified"]?.try &.as_bool
- author_name += "&nbsp;<i class=\"icon ion ion-md-checkmark\"></i>"
- end
-
- if child["isSponsor"]?.try &.as_bool
- sponsor_icon = String.build do |str|
- str << %(<img alt="" )
- str << %(src="/ggpht) << URI.parse(child["sponsorIconUrl"].as_s).request_target << "\" "
- str << %(title=") << translate(locale, "Channel Sponsor") << "\" "
- str << %(width="16" height="16" />)
- end
- end
- html << <<-END_HTML
- <div class="pure-g" style="width:100%">
- <div class="channel-profile pure-u-4-24 pure-u-md-2-24">
- <img loading="lazy" style="margin-right:1em;margin-top:1em;width:90%" src="#{author_thumbnail}">
- </div>
- <div class="pure-u-20-24 pure-u-md-22-24">
- <p>
- <b>
- <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{author_name}</a>
- </b>
- #{sponsor_icon}
- <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
- END_HTML
-
- if child["attachment"]?
- attachment = child["attachment"]
-
- case attachment["type"]
- when "image"
- attachment = attachment["imageThumbnails"][1]
-
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1 pure-u-md-1-2">
- <img loading="lazy" style="width:100%" src="/ggpht#{URI.parse(attachment["url"].as_s).request_target}">
- </div>
- </div>
- END_HTML
- when "video"
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1 pure-u-md-1-2">
- <div style="position:relative;width:100%;height:0;padding-bottom:56.25%;margin-bottom:5px">
- END_HTML
-
- if attachment["error"]?
- html << <<-END_HTML
- <p>#{attachment["error"]}</p>
- END_HTML
- else
- html << <<-END_HTML
- <iframe id='ivplayer' style='position:absolute;width:100%;height:100%;left:0;top:0' src='/embed/#{attachment["videoId"]?}?autoplay=0' style='border:none;'></iframe>
- END_HTML
- end
-
- html << <<-END_HTML
- </div>
- </div>
- </div>
- END_HTML
- else nil # Ignore
- end
- end
-
- html << <<-END_HTML
- <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
- |
- END_HTML
-
- if comments["videoId"]?
- html << <<-END_HTML
- <a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
- |
- END_HTML
- elsif comments["authorId"]?
- html << <<-END_HTML
- <a href="https://www.youtube.com/channel/#{comments["authorId"]}/community?lb=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
- |
- END_HTML
- end
-
- html << <<-END_HTML
- <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
- END_HTML
-
- if child["creatorHeart"]?
- if !thin_mode
- creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).request_target}"
- else
- creator_thumbnail = ""
- end
-
- html << <<-END_HTML
- <span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a â¤", child["creatorHeart"]["creatorName"].as_s)}">
- <div class="creator-heart">
- <img loading="lazy" class="creator-heart-background-hearted" src="#{creator_thumbnail}"></img>
- <div class="creator-heart-small-hearted">
- <div class="icon ion-ios-heart creator-heart-small-container"></div>
- </div>
- </div>
- </span>
- END_HTML
- end
-
- html << <<-END_HTML
- </p>
- #{replies_html}
- </div>
- </div>
- END_HTML
- end
-
- if comments["continuation"]?
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1">
- <p>
- <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
- data-onclick="get_youtube_replies" data-load-more #{"data-load-replies" if is_replies}>#{translate(locale, "Load more")}</a>
- </p>
- </div>
- </div>
- END_HTML
- end
- end
-end
-
-def template_reddit_comments(root, locale)
- String.build do |html|
- root.each do |child|
- if child.data.is_a?(RedditComment)
- child = child.data.as(RedditComment)
- body_html = HTML.unescape(child.body_html)
-
- replies_html = ""
- if child.replies.is_a?(RedditThing)
- replies = child.replies.as(RedditThing)
- replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
- end
-
- if child.depth > 0
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1-24">
- </div>
- <div class="pure-u-23-24">
- END_HTML
- else
- html << <<-END_HTML
- <div class="pure-g">
- <div class="pure-u-1">
- END_HTML
- end
-
- html << <<-END_HTML
- <p>
- <a href="javascript:void(0)" data-onclick="toggle_parent">[ − ]</a>
- <b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
- #{translate_count(locale, "comments_points_count", child.score, NumberFormatting::Separator)}
- <span title="#{child.created_utc.to_s(translate(locale, "%a %B %-d %T %Y UTC"))}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
- <a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
- </p>
- <div>
- #{body_html}
- #{replies_html}
- </div>
- </div>
- </div>
- END_HTML
- end
- end
- end
-end
-
-def replace_links(html)
- # Check if the document is empty
- # Prevents edge-case bug with Reddit comments, see issue #3115
- if html.nil? || html.empty?
- return html
- end
-
- html = XML.parse_html(html)
-
- html.xpath_nodes(%q(//a)).each do |anchor|
- url = URI.parse(anchor["href"])
-
- if url.host.nil? || url.host.not_nil!.ends_with?("youtube.com") || url.host.not_nil!.ends_with?("youtu.be")
- if url.host.try &.ends_with? "youtu.be"
- url = "/watch?v=#{url.path.lstrip('/')}#{url.query_params}"
- else
- if url.path == "/redirect"
- params = HTTP::Params.parse(url.query.not_nil!)
- anchor["href"] = params["q"]?
- else
- anchor["href"] = url.request_target
- end
- end
- elsif url.to_s == "#"
- begin
- length_seconds = decode_length_seconds(anchor.content)
- rescue ex
- length_seconds = decode_time(anchor.content)
- end
-
- if length_seconds > 0
- anchor["href"] = "javascript:void(0)"
- anchor["onclick"] = "player.currentTime(#{length_seconds})"
- else
- anchor["href"] = url.request_target
- end
- end
- end
-
- html = html.xpath_node(%q(//body)).not_nil!
- if node = html.xpath_node(%q(./p))
- html = node
- end
-
- return html.to_xml(options: XML::SaveOptions::NO_DECL)
-end
-
-def fill_links(html, scheme, host)
- # Check if the document is empty
- # Prevents edge-case bug with Reddit comments, see issue #3115
- if html.nil? || html.empty?
- return html
- end
-
- html = XML.parse_html(html)
-
- html.xpath_nodes("//a").each do |match|
- url = URI.parse(match["href"])
- # Reddit links don't have host
- if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
- url.scheme = scheme
- url.host = host
- match["href"] = url
- end
- end
-
- if host == "www.youtube.com"
- html = html.xpath_node(%q(//body/p)).not_nil!
- end
-
- return html.to_xml(options: XML::SaveOptions::NO_DECL)
-end
-
-def text_to_parsed_content(text : String) : JSON::Any
- nodes = [] of JSON::Any
- # For each line convert line to array of nodes
- text.split('\n').each do |line|
- # In first case line is just a simple node before
- # check patterns inside line
- # { 'text': line }
- currentNodes = [] of JSON::Any
- initialNode = {"text" => line}
- currentNodes << (JSON.parse(initialNode.to_json))
-
- # For each match with url pattern, get last node and preserve
- # last node before create new node with url information
- # { 'text': match, 'navigationEndpoint': { 'urlEndpoint' : 'url': match } }
- line.scan(/https?:\/\/[^ ]*/).each do |urlMatch|
- # Retrieve last node and update node without match
- lastNode = currentNodes[currentNodes.size - 1].as_h
- splittedLastNode = lastNode["text"].as_s.split(urlMatch[0])
- lastNode["text"] = JSON.parse(splittedLastNode[0].to_json)
- currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
- # Create new node with match and navigation infos
- currentNode = {"text" => urlMatch[0], "navigationEndpoint" => {"urlEndpoint" => {"url" => urlMatch[0]}}}
- currentNodes << (JSON.parse(currentNode.to_json))
- # If text remain after match create new simple node with text after match
- afterNode = {"text" => splittedLastNode.size > 0 ? splittedLastNode[1] : ""}
- currentNodes << (JSON.parse(afterNode.to_json))
- end
-
- # After processing of matches inside line
- # Add \n at end of last node for preserve carriage return
- lastNode = currentNodes[currentNodes.size - 1].as_h
- lastNode["text"] = JSON.parse("#{currentNodes[currentNodes.size - 1]["text"]}\n".to_json)
- currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
-
- # Finally add final nodes to nodes returned
- currentNodes.each do |node|
- nodes << (node)
- end
- end
- return JSON.parse({"runs" => nodes}.to_json)
-end
-
-def parse_content(content : JSON::Any, video_id : String? = "") : String
- content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
- content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r, video_id).try &.to_s.gsub("\n", "<br>") } || ""
-end
-
-def content_to_comment_html(content, video_id : String? = "")
- html_array = content.map do |run|
- # Sometimes, there is an empty element.
- # See: https://github.com/iv-org/invidious/issues/3096
- next if run.as_h.empty?
-
- text = HTML.escape(run["text"].as_s)
-
- if run["navigationEndpoint"]?
- if url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
- url = URI.parse(url)
- displayed_url = text
-
- if url.host == "youtu.be"
- url = "/watch?v=#{url.request_target.lstrip('/')}"
- elsif url.host.nil? || url.host.not_nil!.ends_with?("youtube.com")
- if url.path == "/redirect"
- # Sometimes, links can be corrupted (why?) so make sure to fallback
- # nicely. See https://github.com/iv-org/invidious/issues/2682
- url = url.query_params["q"]? || ""
- displayed_url = url
- else
- url = url.request_target
- displayed_url = "youtube.com#{url}"
- end
- end
-
- text = %(<a href="#{url}">#{reduce_uri(displayed_url)}</a>)
- elsif watch_endpoint = run["navigationEndpoint"]["watchEndpoint"]?
- start_time = watch_endpoint["startTimeSeconds"]?.try &.as_i
- link_video_id = watch_endpoint["videoId"].as_s
-
- url = "/watch?v=#{link_video_id}"
- url += "&t=#{start_time}" if !start_time.nil?
-
- # If the current video ID (passed through from the caller function)
- # is the same as the video ID in the link, add HTML attributes for
- # the JS handler function that bypasses page reload.
- #
- # See: https://github.com/iv-org/invidious/issues/3063
- if link_video_id == video_id
- start_time ||= 0
- text = %(<a href="#{url}" data-onclick="jump_to_time" data-jump-time="#{start_time}">#{reduce_uri(text)}</a>)
- else
- text = %(<a href="#{url}">#{text}</a>)
- end
- elsif url = run.dig?("navigationEndpoint", "commandMetadata", "webCommandMetadata", "url").try &.as_s
- if text.starts_with?(/\s?[@#]/)
- # Handle "pings" in comments and hasthags differently
- # See:
- # - https://github.com/iv-org/invidious/issues/3038
- # - https://github.com/iv-org/invidious/issues/3062
- text = %(<a href="#{url}">#{text}</a>)
- else
- text = %(<a href="#{url}">#{reduce_uri(url)}</a>)
- end
- end
- end
-
- text = "<b>#{text}</b>" if run["bold"]?
- text = "<s>#{text}</s>" if run["strikethrough"]?
- text = "<i>#{text}</i>" if run["italics"]?
-
- # check for custom emojis
- if run["emoji"]?
- if run["emoji"]["isCustomEmoji"]?.try &.as_bool
- if emojiImage = run.dig?("emoji", "image")
- emojiAlt = emojiImage.dig?("accessibility", "accessibilityData", "label").try &.as_s || text
- emojiThumb = emojiImage["thumbnails"][0]
- text = String.build do |str|
- str << %(<img alt=") << emojiAlt << "\" "
- str << %(src="/ggpht) << URI.parse(emojiThumb["url"].as_s).request_target << "\" "
- str << %(title=") << emojiAlt << "\" "
- str << %(width=") << emojiThumb["width"] << "\" "
- str << %(height=") << emojiThumb["height"] << "\" "
- str << %(class="channel-emoji"/>)
- end
- else
- # Hide deleted channel emoji
- text = ""
- end
- end
- end
-
- text
- end
-
- return html_array.join("").delete('\ufeff')
-end
-
-def produce_comment_continuation(video_id, cursor = "", sort_by = "top")
- object = {
- "2:embedded" => {
- "2:string" => video_id,
- "25:varint" => 0_i64,
- "28:varint" => 1_i64,
- "36:embedded" => {
- "5:varint" => -1_i64,
- "8:varint" => 0_i64,
- },
- "40:embedded" => {
- "1:varint" => 4_i64,
- "3:string" => "https://www.youtube.com",
- "4:string" => "",
- },
- },
- "3:varint" => 6_i64,
- "6:embedded" => {
- "1:string" => cursor,
- "4:embedded" => {
- "4:string" => video_id,
- "6:varint" => 0_i64,
- },
- "5:varint" => 20_i64,
- },
- }
-
- case sort_by
- when "top"
- object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
- when "new", "newest"
- object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 1_i64
- else # top
- object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
- end
-
- continuation = object.try { |i| Protodec::Any.cast_json(i) }
- .try { |i| Protodec::Any.from_json(i) }
- .try { |i| Base64.urlsafe_encode(i) }
- .try { |i| URI.encode_www_form(i) }
-
- return continuation
-end
diff --git a/src/invidious/comments/content.cr b/src/invidious/comments/content.cr
new file mode 100644
index 00000000..c8cdc2df
--- /dev/null
+++ b/src/invidious/comments/content.cr
@@ -0,0 +1,89 @@
+def text_to_parsed_content(text : String) : JSON::Any
+ nodes = [] of JSON::Any
+ # For each line convert line to array of nodes
+ text.split('\n').each do |line|
+ # In first case line is just a simple node before
+ # check patterns inside line
+ # { 'text': line }
+ currentNodes = [] of JSON::Any
+ initialNode = {"text" => line}
+ currentNodes << (JSON.parse(initialNode.to_json))
+
+ # For each match with url pattern, get last node and preserve
+ # last node before create new node with url information
+ # { 'text': match, 'navigationEndpoint': { 'urlEndpoint' : 'url': match } }
+ line.scan(/https?:\/\/[^ ]*/).each do |urlMatch|
+ # Retrieve last node and update node without match
+ lastNode = currentNodes[currentNodes.size - 1].as_h
+ splittedLastNode = lastNode["text"].as_s.split(urlMatch[0])
+ lastNode["text"] = JSON.parse(splittedLastNode[0].to_json)
+ currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
+ # Create new node with match and navigation infos
+ currentNode = {"text" => urlMatch[0], "navigationEndpoint" => {"urlEndpoint" => {"url" => urlMatch[0]}}}
+ currentNodes << (JSON.parse(currentNode.to_json))
+ # If text remain after match create new simple node with text after match
+ afterNode = {"text" => splittedLastNode.size > 1 ? splittedLastNode[1] : ""}
+ currentNodes << (JSON.parse(afterNode.to_json))
+ end
+
+ # After processing of matches inside line
+ # Add \n at end of last node for preserve carriage return
+ lastNode = currentNodes[currentNodes.size - 1].as_h
+ lastNode["text"] = JSON.parse("#{currentNodes[currentNodes.size - 1]["text"]}\n".to_json)
+ currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
+
+ # Finally add final nodes to nodes returned
+ currentNodes.each do |node|
+ nodes << (node)
+ end
+ end
+ return JSON.parse({"runs" => nodes}.to_json)
+end
+
+def parse_content(content : JSON::Any, video_id : String? = "") : String
+ content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
+ content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r, video_id).try &.to_s.gsub("\n", "<br>") } || ""
+end
+
+def content_to_comment_html(content, video_id : String? = "")
+ html_array = content.map do |run|
+ # Sometimes, there is an empty element.
+ # See: https://github.com/iv-org/invidious/issues/3096
+ next if run.as_h.empty?
+
+ text = HTML.escape(run["text"].as_s)
+
+ if navigationEndpoint = run.dig?("navigationEndpoint")
+ text = parse_link_endpoint(navigationEndpoint, text, video_id)
+ end
+
+ text = "<b>#{text}</b>" if run["bold"]?
+ text = "<s>#{text}</s>" if run["strikethrough"]?
+ text = "<i>#{text}</i>" if run["italics"]?
+
+ # check for custom emojis
+ if run["emoji"]?
+ if run["emoji"]["isCustomEmoji"]?.try &.as_bool
+ if emojiImage = run.dig?("emoji", "image")
+ emojiAlt = emojiImage.dig?("accessibility", "accessibilityData", "label").try &.as_s || text
+ emojiThumb = emojiImage["thumbnails"][0]
+ text = String.build do |str|
+ str << %(<img alt=") << emojiAlt << "\" "
+ str << %(src="/ggpht) << URI.parse(emojiThumb["url"].as_s).request_target << "\" "
+ str << %(title=") << emojiAlt << "\" "
+ str << %(width=") << emojiThumb["width"] << "\" "
+ str << %(height=") << emojiThumb["height"] << "\" "
+ str << %(class="channel-emoji" />)
+ end
+ else
+ # Hide deleted channel emoji
+ text = ""
+ end
+ end
+ end
+
+ text
+ end
+
+ return html_array.join("").delete('\ufeff')
+end
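
A short usage sketch of the two helpers above, separate from the commit; the input line and the expected run count are illustrative:

    # text_to_parsed_content splits plain text into InnerTube-style "runs",
    # turning bare URLs into navigationEndpoint/urlEndpoint nodes.
    parsed = text_to_parsed_content("Watch this https://youtu.be/dQw4w9WgXcQ now")
    puts parsed["runs"].as_a.size # => 3 (leading text, the link, trailing text)

    # content_to_comment_html renders such runs back to HTML, linkifying the
    # endpoints via parse_link_endpoint.
    puts content_to_comment_html(parsed["runs"].as_a, "dQw4w9WgXcQ")
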
diff --git a/src/invidious/comments/links_util.cr b/src/invidious/comments/links_util.cr
new file mode 100644
index 00000000..f89b86d3
--- /dev/null
+++ b/src/invidious/comments/links_util.cr
@@ -0,0 +1,76 @@
+module Invidious::Comments
+ extend self
+
+ def replace_links(html)
+ # Check if the document is empty
+ # Prevents edge-case bug with Reddit comments, see issue #3115
+ if html.nil? || html.empty?
+ return html
+ end
+
+ html = XML.parse_html(html)
+
+ html.xpath_nodes(%q(//a)).each do |anchor|
+ url = URI.parse(anchor["href"])
+
+ if url.host.nil? || url.host.not_nil!.ends_with?("youtube.com") || url.host.not_nil!.ends_with?("youtu.be")
+ if url.host.try &.ends_with? "youtu.be"
+ url = "/watch?v=#{url.path.lstrip('/')}#{url.query_params}"
+ else
+ if url.path == "/redirect"
+ params = HTTP::Params.parse(url.query.not_nil!)
+ anchor["href"] = params["q"]?
+ else
+ anchor["href"] = url.request_target
+ end
+ end
+ elsif url.to_s == "#"
+ begin
+ length_seconds = decode_length_seconds(anchor.content)
+ rescue ex
+ length_seconds = decode_time(anchor.content)
+ end
+
+ if length_seconds > 0
+ anchor["href"] = "javascript:void(0)"
+ anchor["onclick"] = "player.currentTime(#{length_seconds})"
+ else
+ anchor["href"] = url.request_target
+ end
+ end
+ end
+
+ html = html.xpath_node(%q(//body)).not_nil!
+ if node = html.xpath_node(%q(./p))
+ html = node
+ end
+
+ return html.to_xml(options: XML::SaveOptions::NO_DECL)
+ end
+
+ def fill_links(html, scheme, host)
+ # Check if the document is empty
+ # Prevents edge-case bug with Reddit comments, see issue #3115
+ if html.nil? || html.empty?
+ return html
+ end
+
+ html = XML.parse_html(html)
+
+ html.xpath_nodes("//a").each do |match|
+ url = URI.parse(match["href"])
+ # Reddit links don't have host
+ if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
+ url.scheme = scheme
+ url.host = host
+ match["href"] = url
+ end
+ end
+
+ if host == "www.youtube.com"
+ html = html.xpath_node(%q(//body/p)).not_nil!
+ end
+
+ return html.to_xml(options: XML::SaveOptions::NO_DECL)
+ end
+end
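
A brief sketch, not part of the commit, of chaining the two helpers above on a hand-written Reddit comment body. Filling hosts first and rewriting YouTube links second matters: the other way round, fill_links would prepend the Reddit host to the freshly relativized /watch links.

    body = %(<p>See <a href="https://www.youtube.com/watch?v=dQw4w9WgXcQ">this</a> and <a href="/r/videos">r/videos</a></p>)

    # Give host-less (Reddit-relative) hrefs an absolute origin first...
    html = Invidious::Comments.fill_links(body, "https", "www.reddit.com")

    # ...then rewrite YouTube hrefs to local paths (/watch?v=...) and turn
    # "#" timestamp anchors into player seeks.
    html = Invidious::Comments.replace_links(html)
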
diff --git a/src/invidious/comments/reddit.cr b/src/invidious/comments/reddit.cr
new file mode 100644
index 00000000..ba9c19f1
--- /dev/null
+++ b/src/invidious/comments/reddit.cr
@@ -0,0 +1,41 @@
+module Invidious::Comments
+ extend self
+
+ def fetch_reddit(id, sort_by = "confidence")
+ client = make_client(REDDIT_URL)
+ headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}
+
+ # TODO: Use something like #479 for a static list of instances to use here
+ query = URI::Params.encode({q: "(url:3D#{id} OR url:#{id}) AND (site:invidio.us OR site:youtube.com OR site:youtu.be)"})
+ search_results = client.get("/search.json?#{query}", headers)
+
+ if search_results.status_code == 200
+ search_results = RedditThing.from_json(search_results.body)
+
+ # For videos that have more than one thread, choose the one with the highest score
+ threads = search_results.data.as(RedditListing).children
+ thread = threads.max_by?(&.data.as(RedditLink).score).try(&.data.as(RedditLink))
+ result = thread.try do |t|
+ body = client.get("/r/#{t.subreddit}/comments/#{t.id}.json?limit=100&sort=#{sort_by}", headers).body
+ Array(RedditThing).from_json(body)
+ end
+ result ||= [] of RedditThing
+ elsif search_results.status_code == 302
+ # Previously, if there was only one result then the API would redirect to that result.
+ # Now, it appears it will still return a listing so this section is likely unnecessary.
+
+ result = client.get(search_results.headers["Location"], headers).body
+ result = Array(RedditThing).from_json(result)
+
+ thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
+ else
+ raise NotFoundException.new("Comments not found.")
+ end
+
+ client.close
+
+ comments = result[1]?.try(&.data.as(RedditListing).children)
+ comments ||= [] of RedditThing
+ return comments, thread
+ end
+end
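
A usage sketch, not part of the commit, tying the fetcher above to the Reddit template added later in this changeset; the video ID is a placeholder and a live Reddit API is assumed:

    locale = nil # fall back to the default locale

    # Raises NotFoundException when the Reddit search itself fails.
    comments, thread = Invidious::Comments.fetch_reddit("dQw4w9WgXcQ", sort_by: "top")

    puts thread.try &.title
    content_html = Invidious::Frontend::Comments.template_reddit(comments, locale)
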
diff --git a/src/invidious/comments/reddit_types.cr b/src/invidious/comments/reddit_types.cr
new file mode 100644
index 00000000..796a1183
--- /dev/null
+++ b/src/invidious/comments/reddit_types.cr
@@ -0,0 +1,57 @@
+class RedditThing
+ include JSON::Serializable
+
+ property kind : String
+ property data : RedditComment | RedditLink | RedditMore | RedditListing
+end
+
+class RedditComment
+ include JSON::Serializable
+
+ property author : String
+ property body_html : String
+ property replies : RedditThing | String
+ property score : Int32
+ property depth : Int32
+ property permalink : String
+
+ @[JSON::Field(converter: RedditComment::TimeConverter)]
+ property created_utc : Time
+
+ module TimeConverter
+ def self.from_json(value : JSON::PullParser) : Time
+ Time.unix(value.read_float.to_i)
+ end
+
+ def self.to_json(value : Time, json : JSON::Builder)
+ json.number(value.to_unix)
+ end
+ end
+end
+
+struct RedditLink
+ include JSON::Serializable
+
+ property author : String
+ property score : Int32
+ property subreddit : String
+ property num_comments : Int32
+ property id : String
+ property permalink : String
+ property title : String
+end
+
+struct RedditMore
+ include JSON::Serializable
+
+ property children : Array(String)
+ property count : Int32
+ property depth : Int32
+end
+
+class RedditListing
+ include JSON::Serializable
+
+ property children : Array(RedditThing)
+ property modhash : String
+end
diff --git a/src/invidious/comments/youtube.cr b/src/invidious/comments/youtube.cr
new file mode 100644
index 00000000..1ba1b534
--- /dev/null
+++ b/src/invidious/comments/youtube.cr
@@ -0,0 +1,250 @@
+module Invidious::Comments
+ extend self
+
+ def fetch_youtube(id, cursor, format, locale, thin_mode, region, sort_by = "top")
+ case cursor
+ when nil, ""
+ ctoken = Comments.produce_continuation(id, cursor: "", sort_by: sort_by)
+ when .starts_with? "ADSJ"
+ ctoken = Comments.produce_continuation(id, cursor: cursor, sort_by: sort_by)
+ else
+ ctoken = cursor
+ end
+
+ client_config = YoutubeAPI::ClientConfig.new(region: region)
+ response = YoutubeAPI.next(continuation: ctoken, client_config: client_config)
+ contents = nil
+
+ if on_response_received_endpoints = response["onResponseReceivedEndpoints"]?
+ header = nil
+ on_response_received_endpoints.as_a.each do |item|
+ if item["reloadContinuationItemsCommand"]?
+ case item["reloadContinuationItemsCommand"]["slot"]
+ when "RELOAD_CONTINUATION_SLOT_HEADER"
+ header = item["reloadContinuationItemsCommand"]["continuationItems"][0]
+ when "RELOAD_CONTINUATION_SLOT_BODY"
+ # continuationItems is nil when video has no comments
+ contents = item["reloadContinuationItemsCommand"]["continuationItems"]?
+ end
+ elsif item["appendContinuationItemsAction"]?
+ contents = item["appendContinuationItemsAction"]["continuationItems"]
+ end
+ end
+ elsif response["continuationContents"]?
+ response = response["continuationContents"]
+ if response["commentRepliesContinuation"]?
+ body = response["commentRepliesContinuation"]
+ else
+ body = response["itemSectionContinuation"]
+ end
+ contents = body["contents"]?
+ header = body["header"]?
+ else
+ raise NotFoundException.new("Comments not found.")
+ end
+
+ if !contents
+ if format == "json"
+ return {"comments" => [] of String}.to_json
+ else
+ return {"contentHtml" => "", "commentCount" => 0}.to_json
+ end
+ end
+
+ continuation_item_renderer = nil
+ contents.as_a.reject! do |item|
+ if item["continuationItemRenderer"]?
+ continuation_item_renderer = item["continuationItemRenderer"]
+ true
+ end
+ end
+
+ response = JSON.build do |json|
+ json.object do
+ if header
+ count_text = header["commentsHeaderRenderer"]["countText"]
+ comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
+ .try &.as_s.gsub(/\D/, "").to_i? || 0
+ json.field "commentCount", comment_count
+ end
+
+ json.field "videoId", id
+
+ json.field "comments" do
+ json.array do
+ contents.as_a.each do |node|
+ json.object do
+ if node["commentThreadRenderer"]?
+ node = node["commentThreadRenderer"]
+ end
+
+ if node["replies"]?
+ node_replies = node["replies"]["commentRepliesRenderer"]
+ end
+
+ if node["comment"]?
+ node_comment = node["comment"]["commentRenderer"]
+ else
+ node_comment = node["commentRenderer"]
+ end
+
+ content_html = node_comment["contentText"]?.try { |t| parse_content(t, id) } || ""
+ author = node_comment["authorText"]?.try &.["simpleText"]? || ""
+
+ json.field "verified", (node_comment["authorCommentBadge"]? != nil)
+
+ json.field "author", author
+ json.field "authorThumbnails" do
+ json.array do
+ node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
+ json.object do
+ json.field "url", thumbnail["url"]
+ json.field "width", thumbnail["width"]
+ json.field "height", thumbnail["height"]
+ end
+ end
+ end
+ end
+
+ if node_comment["authorEndpoint"]?
+ json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
+ json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
+ else
+ json.field "authorId", ""
+ json.field "authorUrl", ""
+ end
+
+ published_text = node_comment["publishedTimeText"]["runs"][0]["text"].as_s
+ published = decode_date(published_text.rchop(" (edited)"))
+
+ if published_text.includes?(" (edited)")
+ json.field "isEdited", true
+ else
+ json.field "isEdited", false
+ end
+
+ json.field "content", html_to_content(content_html)
+ json.field "contentHtml", content_html
+
+ json.field "isPinned", (node_comment["pinnedCommentBadge"]? != nil)
+ json.field "isSponsor", (node_comment["sponsorCommentBadge"]? != nil)
+ if node_comment["sponsorCommentBadge"]?
+ # Sponsor icon thumbnails always have one object and there's only ever the url property in it
+ json.field "sponsorIconUrl", node_comment.dig("sponsorCommentBadge", "sponsorCommentBadgeRenderer", "customBadge", "thumbnails", 0, "url").to_s
+ end
+ json.field "published", published.to_unix
+ json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))
+
+ comment_action_buttons_renderer = node_comment["actionButtons"]["commentActionButtonsRenderer"]
+
+ json.field "likeCount", comment_action_buttons_renderer["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"].as_s.scan(/\d/).map(&.[0]).join.to_i
+ json.field "commentId", node_comment["commentId"]
+ json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]
+
+ if comment_action_buttons_renderer["creatorHeart"]?
+ hearth_data = comment_action_buttons_renderer["creatorHeart"]["creatorHeartRenderer"]["creatorThumbnail"]
+ json.field "creatorHeart" do
+ json.object do
+ json.field "creatorThumbnail", hearth_data["thumbnails"][-1]["url"]
+ json.field "creatorName", hearth_data["accessibility"]["accessibilityData"]["label"]
+ end
+ end
+ end
+
+ if node_replies && !response["commentRepliesContinuation"]?
+ if node_replies["continuations"]?
+ continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
+ elsif node_replies["contents"]?
+ continuation = node_replies["contents"]?.try &.as_a[0]["continuationItemRenderer"]["continuationEndpoint"]["continuationCommand"]["token"].as_s
+ end
+ continuation ||= ""
+
+ json.field "replies" do
+ json.object do
+ json.field "replyCount", node_comment["replyCount"]? || 1
+ json.field "continuation", continuation
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ if continuation_item_renderer
+ if continuation_item_renderer["continuationEndpoint"]?
+ continuation_endpoint = continuation_item_renderer["continuationEndpoint"]
+ elsif continuation_item_renderer["button"]?
+ continuation_endpoint = continuation_item_renderer["button"]["buttonRenderer"]["command"]
+ end
+ if continuation_endpoint
+ json.field "continuation", continuation_endpoint["continuationCommand"]["token"].as_s
+ end
+ end
+ end
+ end
+
+ if format == "html"
+ response = JSON.parse(response)
+ content_html = Frontend::Comments.template_youtube(response, locale, thin_mode)
+
+ response = JSON.build do |json|
+ json.object do
+ json.field "contentHtml", content_html
+
+ if response["commentCount"]?
+ json.field "commentCount", response["commentCount"]
+ else
+ json.field "commentCount", 0
+ end
+ end
+ end
+ end
+
+ return response
+ end
+
+ def produce_continuation(video_id, cursor = "", sort_by = "top")
+ object = {
+ "2:embedded" => {
+ "2:string" => video_id,
+ "25:varint" => 0_i64,
+ "28:varint" => 1_i64,
+ "36:embedded" => {
+ "5:varint" => -1_i64,
+ "8:varint" => 0_i64,
+ },
+ "40:embedded" => {
+ "1:varint" => 4_i64,
+ "3:string" => "https://www.youtube.com",
+ "4:string" => "",
+ },
+ },
+ "3:varint" => 6_i64,
+ "6:embedded" => {
+ "1:string" => cursor,
+ "4:embedded" => {
+ "4:string" => video_id,
+ "6:varint" => 0_i64,
+ },
+ "5:varint" => 20_i64,
+ },
+ }
+
+ case sort_by
+ when "top"
+ object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
+ when "new", "newest"
+ object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 1_i64
+ else # top
+ object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
+ end
+
+ continuation = object.try { |i| Protodec::Any.cast_json(i) }
+ .try { |i| Protodec::Any.from_json(i) }
+ .try { |i| Base64.urlsafe_encode(i) }
+ .try { |i| URI.encode_www_form(i) }
+
+ return continuation
+ end
+end
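
A short sketch, separate from the commit, of the continuation builder feeding fetch_youtube; the video ID is a placeholder:

    # Build a protobuf continuation token for the "newest first" comment
    # listing, then let fetch_youtube resolve it through YoutubeAPI.next.
    ctoken = Invidious::Comments.produce_continuation("dQw4w9WgXcQ", cursor: "", sort_by: "new")

    # The token is Protodec-encoded, URL-safe Base64 and form-encoded, so it
    # can be passed around directly as the "continuation" cursor.
    json = Invidious::Comments.fetch_youtube("dQw4w9WgXcQ", ctoken, "json", nil, false, nil, sort_by: "new")
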
diff --git a/src/invidious/database/users.cr b/src/invidious/database/users.cr
index 0a4a4fd8..d54e6a76 100644
--- a/src/invidious/database/users.cr
+++ b/src/invidious/database/users.cr
@@ -52,7 +52,7 @@ module Invidious::Database::Users
def mark_watched(user : User, vid : String)
request = <<-SQL
UPDATE users
- SET watched = array_append(watched, $1)
+ SET watched = array_append(array_remove(watched, $1), $1)
WHERE email = $2
SQL
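
A minimal standalone sketch of the reworked query's effect (connection string, table contents and values are assumptions): re-watching a video now moves its ID to the end of the watched array instead of appending a duplicate.

    require "db"
    require "pg"

    DB.open("postgres://invidious@localhost/invidious") do |db|
      # With watched = {a, b, c}, marking "b" again yields {a, c, b}:
      # array_remove drops the old entry before array_append re-adds it.
      db.exec(<<-SQL, "b", "user@example.com")
        UPDATE users
        SET watched = array_append(array_remove(watched, $1), $1)
        WHERE email = $2
      SQL
    end
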
diff --git a/src/invidious/frontend/comments_reddit.cr b/src/invidious/frontend/comments_reddit.cr
new file mode 100644
index 00000000..b5647bae
--- /dev/null
+++ b/src/invidious/frontend/comments_reddit.cr
@@ -0,0 +1,50 @@
+module Invidious::Frontend::Comments
+ extend self
+
+ def template_reddit(root, locale)
+ String.build do |html|
+ root.each do |child|
+ if child.data.is_a?(RedditComment)
+ child = child.data.as(RedditComment)
+ body_html = HTML.unescape(child.body_html)
+
+ replies_html = ""
+ if child.replies.is_a?(RedditThing)
+ replies = child.replies.as(RedditThing)
+ replies_html = self.template_reddit(replies.data.as(RedditListing).children, locale)
+ end
+
+ if child.depth > 0
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1-24">
+ </div>
+ <div class="pure-u-23-24">
+ END_HTML
+ else
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1">
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ <p>
+ <a href="javascript:void(0)" data-onclick="toggle_parent">[ − ]</a>
+ <b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
+ #{translate_count(locale, "comments_points_count", child.score, NumberFormatting::Separator)}
+ <span title="#{child.created_utc.to_s(translate(locale, "%a %B %-d %T %Y UTC"))}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
+ <a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
+ </p>
+ <div>
+ #{body_html}
+ #{replies_html}
+ </div>
+ </div>
+ </div>
+ END_HTML
+ end
+ end
+ end
+ end
+end
diff --git a/src/invidious/frontend/comments_youtube.cr b/src/invidious/frontend/comments_youtube.cr
new file mode 100644
index 00000000..41f43f04
--- /dev/null
+++ b/src/invidious/frontend/comments_youtube.cr
@@ -0,0 +1,160 @@
+module Invidious::Frontend::Comments
+ extend self
+
+ def template_youtube(comments, locale, thin_mode, is_replies = false)
+ String.build do |html|
+ root = comments["comments"].as_a
+ root.each do |child|
+ if child["replies"]?
+ replies_count_text = translate_count(locale,
+ "comments_view_x_replies",
+ child["replies"]["replyCount"].as_i64 || 0,
+ NumberFormatting::Separator
+ )
+
+ replies_html = <<-END_HTML
+ <div id="replies" class="pure-g">
+ <div class="pure-u-1-24"></div>
+ <div class="pure-u-23-24">
+ <p>
+ <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
+ data-onclick="get_youtube_replies" data-load-replies>#{replies_count_text}</a>
+ </p>
+ </div>
+ </div>
+ END_HTML
+ end
+
+ if !thin_mode
+ author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).request_target}"
+ else
+ author_thumbnail = ""
+ end
+
+ author_name = HTML.escape(child["author"].as_s)
+ sponsor_icon = ""
+ if child["verified"]?.try &.as_bool && child["authorIsChannelOwner"]?.try &.as_bool
+ author_name += "&nbsp;<i class=\"icon ion ion-md-checkmark-circle\"></i>"
+ elsif child["verified"]?.try &.as_bool
+ author_name += "&nbsp;<i class=\"icon ion ion-md-checkmark\"></i>"
+ end
+
+ if child["isSponsor"]?.try &.as_bool
+ sponsor_icon = String.build do |str|
+ str << %(<img alt="" )
+ str << %(src="/ggpht) << URI.parse(child["sponsorIconUrl"].as_s).request_target << "\" "
+ str << %(title=") << translate(locale, "Channel Sponsor") << "\" "
+ str << %(width="16" height="16" />)
+ end
+ end
+ html << <<-END_HTML
+ <div class="pure-g" style="width:100%">
+ <div class="channel-profile pure-u-4-24 pure-u-md-2-24">
+ <img loading="lazy" style="margin-right:1em;margin-top:1em;width:90%" src="#{author_thumbnail}" alt="" />
+ </div>
+ <div class="pure-u-20-24 pure-u-md-22-24">
+ <p>
+ <b>
+ <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{author_name}</a>
+ </b>
+ #{sponsor_icon}
+ <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
+ END_HTML
+
+ if child["attachment"]?
+ attachment = child["attachment"]
+
+ case attachment["type"]
+ when "image"
+ attachment = attachment["imageThumbnails"][1]
+
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1 pure-u-md-1-2">
+ <img loading="lazy" style="width:100%" src="/ggpht#{URI.parse(attachment["url"].as_s).request_target}" alt="" />
+ </div>
+ </div>
+ END_HTML
+ when "video"
+ if attachment["error"]?
+ html << <<-END_HTML
+ <div class="pure-g video-iframe-wrapper">
+ <p>#{attachment["error"]}</p>
+ </div>
+ END_HTML
+ else
+ html << <<-END_HTML
+ <div class="pure-g video-iframe-wrapper">
+ <iframe class="video-iframe" src='/embed/#{attachment["videoId"]?}?autoplay=0'></iframe>
+ </div>
+ END_HTML
+ end
+ else nil # Ignore
+ end
+ end
+
+ html << <<-END_HTML
+ <p>
+ <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
+ |
+ END_HTML
+
+ if comments["videoId"]?
+ html << <<-END_HTML
+ <a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
+ |
+ END_HTML
+ elsif comments["authorId"]?
+ html << <<-END_HTML
+ <a href="https://www.youtube.com/channel/#{comments["authorId"]}/community?lb=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
+ |
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
+ END_HTML
+
+ if child["creatorHeart"]?
+ if !thin_mode
+ creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).request_target}"
+ else
+ creator_thumbnail = ""
+ end
+
+ html << <<-END_HTML
+ &nbsp;
+                 <span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a ❤", child["creatorHeart"]["creatorName"].as_s)}">
+ <span class="creator-heart">
+ <img loading="lazy" class="creator-heart-background-hearted" src="#{creator_thumbnail}" alt="" />
+ <span class="creator-heart-small-hearted">
+ <span class="icon ion-ios-heart creator-heart-small-container"></span>
+ </span>
+ </span>
+ </span>
+ END_HTML
+ end
+
+ html << <<-END_HTML
+ </p>
+ #{replies_html}
+ </div>
+ </div>
+ END_HTML
+ end
+
+ if comments["continuation"]?
+ html << <<-END_HTML
+ <div class="pure-g">
+ <div class="pure-u-1">
+ <p>
+ <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
+ data-onclick="get_youtube_replies" data-load-more #{"data-load-replies" if is_replies}>#{translate(locale, "Load more")}</a>
+ </p>
+ </div>
+ </div>
+ END_HTML
+ end
+ end
+ end
+end
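
The `comments` argument is the parsed JSON produced by the YouTube comments fetcher. A rough illustration of the minimum shape this template reads follows; field names are taken from the accesses above, while all values are invented.

    require "json"

    # Illustrative input only: the real structure is built by Comments.fetch_youtube
    # and carries more fields (attachments, creator hearts, reply continuations, ...).
    comments = JSON.parse(<<-JSON)
      {
        "videoId": "dQw4w9WgXcQ",
        "comments": [
          {
            "author": "Example User",
            "authorUrl": "/channel/UCxxxxxxxxxxxxxxxxxxxxxx",
            "authorIsChannelOwner": false,
            "authorThumbnails": [{"url": "https://yt3.ggpht.com/example=s176"}],
            "contentHtml": "Nice video!",
            "published": 1650000000,
            "likeCount": 42,
            "commentId": "UgyExampleCommentId"
          }
        ],
        "continuation": "EiYSC0V4YW1wbGU"
      }
      JSON

    # Frontend::Comments.template_youtube(comments, locale, thin_mode) walks
    # "comments", renders one block per entry, and appends a "Load more" link
    # when a top-level "continuation" is present.
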
diff --git a/src/invidious/hashtag.cr b/src/invidious/hashtag.cr
index bc329205..d9d584c9 100644
--- a/src/invidious/hashtag.cr
+++ b/src/invidious/hashtag.cr
@@ -17,21 +17,18 @@ module Invidious::Hashtag
"80226972:embedded" => {
"2:string" => "FEhashtag",
"3:base64" => {
- "1:varint" => cursor.to_i64,
- },
- "7:base64" => {
- "325477796:embedded" => {
- "1:embedded" => {
- "2:0:embedded" => {
- "2:string" => '#' + hashtag,
- "4:varint" => 0_i64,
- "11:string" => "",
- },
- "4:string" => "browse-feedFEhashtag",
- },
- "2:string" => hashtag,
+ "1:varint" => 60_i64, # result count
+ "15:base64" => {
+ "1:varint" => cursor.to_i64,
+ "2:varint" => 0_i64,
+ },
+ "93:2:embedded" => {
+ "1:string" => hashtag,
+ "2:varint" => 0_i64,
+ "3:varint" => 1_i64,
},
},
+ "35:string" => "browse-feedFEhashtag",
},
}
diff --git a/src/invidious/helpers/serialized_yt_data.cr b/src/invidious/helpers/serialized_yt_data.cr
index c1874780..7c12ad0e 100644
--- a/src/invidious/helpers/serialized_yt_data.cr
+++ b/src/invidious/helpers/serialized_yt_data.cr
@@ -84,6 +84,7 @@ struct SearchVideo
json.field "descriptionHtml", self.description_html
json.field "viewCount", self.views
+ json.field "viewCountText", translate_count(locale, "generic_views_count", self.views, NumberFormatting::Short)
json.field "published", self.published.to_unix
json.field "publishedText", translate(locale, "`x` ago", recode_date(self.published, locale))
json.field "lengthSeconds", self.length_seconds
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index 500a2582..48bf769f 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -111,24 +111,27 @@ def decode_date(string : String)
else nil # Continue
end
- # String matches format "20 hours ago", "4 months ago"...
- date = string.split(" ")[-3, 3]
- delta = date[0].to_i
+ # String matches format "20 hours ago", "4 months ago", "20s ago", "15min ago"...
+ match = string.match(/(?<count>\d+) ?(?<span>[smhdwy]\w*) ago/)
- case date[1]
- when .includes? "second"
+ raise "Could not parse #{string}" if match.nil?
+
+ delta = match["count"].to_i
+
+ case match["span"]
+ when .starts_with? "s" # second(s)
delta = delta.seconds
- when .includes? "minute"
+ when .starts_with? "mi" # minute(s)
delta = delta.minutes
- when .includes? "hour"
+ when .starts_with? "h" # hour(s)
delta = delta.hours
- when .includes? "day"
+ when .starts_with? "d" # day(s)
delta = delta.days
- when .includes? "week"
+ when .starts_with? "w" # week(s)
delta = delta.weeks
- when .includes? "month"
+ when .starts_with? "mo" # month(s)
delta = delta.months
- when .includes? "year"
+ when .starts_with? "y" # year(s)
delta = delta.years
else
raise "Could not parse #{string}"
@@ -389,3 +392,56 @@ def reduce_uri(uri : URI | String, max_length : Int32 = 50, suffix : String = "…"
end
return str
end
+
+# Get the html link from a NavigationEndpoint or an innertubeCommand
+def parse_link_endpoint(endpoint : JSON::Any, text : String, video_id : String)
+ if url = endpoint.dig?("urlEndpoint", "url").try &.as_s
+ url = URI.parse(url)
+ displayed_url = text
+
+ if url.host == "youtu.be"
+ url = "/watch?v=#{url.request_target.lstrip('/')}"
+ elsif url.host.nil? || url.host.not_nil!.ends_with?("youtube.com")
+ if url.path == "/redirect"
+      # Sometimes, links can be corrupted (why?) so make sure to fall back
+ # nicely. See https://github.com/iv-org/invidious/issues/2682
+ url = url.query_params["q"]? || ""
+ displayed_url = url
+ else
+ url = url.request_target
+ displayed_url = "youtube.com#{url}"
+ end
+ end
+
+ text = %(<a href="#{url}">#{reduce_uri(displayed_url)}</a>)
+ elsif watch_endpoint = endpoint.dig?("watchEndpoint")
+ start_time = watch_endpoint["startTimeSeconds"]?.try &.as_i
+ link_video_id = watch_endpoint["videoId"].as_s
+
+ url = "/watch?v=#{link_video_id}"
+ url += "&t=#{start_time}" if !start_time.nil?
+
+ # If the current video ID (passed through from the caller function)
+ # is the same as the video ID in the link, add HTML attributes for
+ # the JS handler function that bypasses page reload.
+ #
+ # See: https://github.com/iv-org/invidious/issues/3063
+ if link_video_id == video_id
+ start_time ||= 0
+ text = %(<a href="#{url}" data-onclick="jump_to_time" data-jump-time="#{start_time}">#{reduce_uri(text)}</a>)
+ else
+ text = %(<a href="#{url}">#{text}</a>)
+ end
+ elsif url = endpoint.dig?("commandMetadata", "webCommandMetadata", "url").try &.as_s
+ if text.starts_with?(/\s?[@#]/)
+    # Handle "pings" in comments and hashtags differently
+ # See:
+ # - https://github.com/iv-org/invidious/issues/3038
+ # - https://github.com/iv-org/invidious/issues/3062
+ text = %(<a href="#{url}">#{text}</a>)
+ else
+ text = %(<a href="#{url}">#{reduce_uri(url)}</a>)
+ end
+ end
+ return text
+end
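
A hedged usage sketch for parse_link_endpoint, assuming the helper above is loaded: the payload mimics an innertube watchEndpoint (shape inferred from the dig? calls above, values invented), and since the linked video matches the current one the result carries the jump_to_time attributes.

    require "json"

    # Hypothetical endpoint payload; only the keys read above are included.
    endpoint = JSON.parse(%({"watchEndpoint": {"videoId": "abc123DEF45", "startTimeSeconds": 90}}))

    puts parse_link_endpoint(endpoint, "1:30", "abc123DEF45")
    # Expected (assuming reduce_uri leaves the short "1:30" label untouched):
    # <a href="/watch?v=abc123DEF45&t=90" data-onclick="jump_to_time" data-jump-time="90">1:30</a>
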
diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr
index 3f342b92..823ca85b 100644
--- a/src/invidious/mixes.cr
+++ b/src/invidious/mixes.cr
@@ -97,7 +97,7 @@ def template_mix(mix)
<li class="pure-menu-item">
<a href="/watch?v=#{video["videoId"]}&list=#{mix["mixId"]}">
<div class="thumbnail">
- <img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg">
+ <img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg" alt="" />
<p class="length">#{recode_length_seconds(video["lengthSeconds"].as_i)}</p>
</div>
<p style="width:100%">#{video["title"]}</p>
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index 57f1f53e..013be268 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -507,7 +507,7 @@ def template_playlist(playlist)
<li class="pure-menu-item" id="#{video["videoId"]}">
<a href="/watch?v=#{video["videoId"]}&list=#{playlist["playlistId"]}&index=#{video["index"]}">
<div class="thumbnail">
- <img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg">
+ <img loading="lazy" class="thumbnail" src="/vi/#{video["videoId"]}/mqdefault.jpg" alt="" />
<p class="length">#{recode_length_seconds(video["lengthSeconds"].as_i)}</p>
</div>
<p style="width:100%">#{video["title"]}</p>
diff --git a/src/invidious/routes/api/v1/search.cr b/src/invidious/routes/api/v1/search.cr
index 21451d33..9fb283c2 100644
--- a/src/invidious/routes/api/v1/search.cr
+++ b/src/invidious/routes/api/v1/search.cr
@@ -55,4 +55,32 @@ module Invidious::Routes::API::V1::Search
return error_json(500, ex)
end
end
+
+ def self.hashtag(env)
+ hashtag = env.params.url["hashtag"]
+
+ page = env.params.query["page"]?.try &.to_i? || 1
+
+ locale = env.get("preferences").as(Preferences).locale
+ region = env.params.query["region"]?
+ env.response.content_type = "application/json"
+
+ begin
+ results = Invidious::Hashtag.fetch(hashtag, page, region)
+ rescue ex
+ return error_json(400, ex)
+ end
+
+ JSON.build do |json|
+ json.object do
+ json.field "results" do
+ json.array do
+ results.each do |item|
+ item.to_json(locale, json)
+ end
+ end
+ end
+ end
+ end
+ end
end
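
Together with the route added in routing.cr below, this exposes hashtag results over the API. A minimal client-side sketch, with a hypothetical instance URL:

    require "http/client"
    require "json"

    # Hypothetical instance; the path and "results" wrapper come from the handler above.
    response = HTTP::Client.get("https://invidious.example.com/api/v1/hashtag/music?page=1")
    results = JSON.parse(response.body)["results"].as_a
    puts "#{results.size} items"
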
diff --git a/src/invidious/routes/api/v1/videos.cr b/src/invidious/routes/api/v1/videos.cr
index f312211e..af4fc806 100644
--- a/src/invidious/routes/api/v1/videos.cr
+++ b/src/invidious/routes/api/v1/videos.cr
@@ -333,7 +333,7 @@ module Invidious::Routes::API::V1::Videos
sort_by ||= "top"
begin
- comments = fetch_youtube_comments(id, continuation, format, locale, thin_mode, region, sort_by: sort_by)
+ comments = Comments.fetch_youtube(id, continuation, format, locale, thin_mode, region, sort_by: sort_by)
rescue ex : NotFoundException
return error_json(404, ex)
rescue ex
@@ -345,7 +345,7 @@ module Invidious::Routes::API::V1::Videos
sort_by ||= "confidence"
begin
- comments, reddit_thread = fetch_reddit_comments(id, sort_by: sort_by)
+ comments, reddit_thread = Comments.fetch_reddit(id, sort_by: sort_by)
rescue ex
comments = nil
reddit_thread = nil
@@ -361,9 +361,9 @@ module Invidious::Routes::API::V1::Videos
return reddit_thread.to_json
else
- content_html = template_reddit_comments(comments, locale)
- content_html = fill_links(content_html, "https", "www.reddit.com")
- content_html = replace_links(content_html)
+ content_html = Frontend::Comments.template_reddit(comments, locale)
+ content_html = Comments.fill_links(content_html, "https", "www.reddit.com")
+ content_html = Comments.replace_links(content_html)
response = {
"title" => reddit_thread.title,
"permalink" => reddit_thread.permalink,
diff --git a/src/invidious/routes/channels.cr b/src/invidious/routes/channels.cr
index d3969d29..16621994 100644
--- a/src/invidious/routes/channels.cr
+++ b/src/invidious/routes/channels.cr
@@ -278,6 +278,7 @@ module Invidious::Routes::Channels
return error_template(500, ex)
end
+ env.set "search", "channel:#{ucid} "
return {locale, user, subscriptions, continuation, ucid, channel}
end
end
diff --git a/src/invidious/routes/playlists.cr b/src/invidious/routes/playlists.cr
index 0d242ee6..8675fa45 100644
--- a/src/invidious/routes/playlists.cr
+++ b/src/invidious/routes/playlists.cr
@@ -410,8 +410,8 @@ module Invidious::Routes::Playlists
return error_template(500, ex)
end
- page_count = (playlist.video_count / 100).to_i
- page_count += 1 if (playlist.video_count % 100) > 0
+ page_count = (playlist.video_count / 200).to_i
+ page_count += 1 if (playlist.video_count % 200) > 0
if page > page_count
return env.redirect "/playlist?list=#{plid}&page=#{page_count}"
@@ -422,7 +422,7 @@ module Invidious::Routes::Playlists
end
begin
- videos = get_playlist_videos(playlist, offset: (page - 1) * 100)
+ videos = get_playlist_videos(playlist, offset: (page - 1) * 200)
rescue ex
return error_template(500, "Error encountered while retrieving playlist videos.<br>#{ex.message}")
end
diff --git a/src/invidious/routes/preferences.cr b/src/invidious/routes/preferences.cr
index 570cba69..abe0f34e 100644
--- a/src/invidious/routes/preferences.cr
+++ b/src/invidious/routes/preferences.cr
@@ -310,6 +310,15 @@ module Invidious::Routes::PreferencesRoute
response: error_template(415, "Invalid subscription file uploaded")
)
end
+ when "import_youtube_pl"
+ filename = part.filename || ""
+ success = Invidious::User::Import.from_youtube_pl(user, body, filename, type)
+
+ if !success
+ haltf(env, status_code: 415,
+ response: error_template(415, "Invalid playlist file uploaded")
+ )
+ end
when "import_freetube"
Invidious::User::Import.from_freetube(user, body)
when "import_newpipe_subscriptions"
diff --git a/src/invidious/routes/search.cr b/src/invidious/routes/search.cr
index 2a9705cf..6c3088de 100644
--- a/src/invidious/routes/search.cr
+++ b/src/invidious/routes/search.cr
@@ -65,7 +65,11 @@ module Invidious::Routes::Search
redirect_url = Invidious::Frontend::Misc.redirect_url(env)
- env.set "search", query.text
+ if query.type == Invidious::Search::Query::Type::Channel
+ env.set "search", "channel:#{query.channel} #{query.text}"
+ else
+ env.set "search", query.text
+ end
templated "search"
end
end
diff --git a/src/invidious/routes/watch.cr b/src/invidious/routes/watch.cr
index 5d3845c3..e5cf3716 100644
--- a/src/invidious/routes/watch.cr
+++ b/src/invidious/routes/watch.cr
@@ -76,7 +76,7 @@ module Invidious::Routes::Watch
end
env.params.query.delete_all("iv_load_policy")
- if watched && preferences.watch_history && !watched.includes? id
+ if watched && preferences.watch_history
Invidious::Database::Users.mark_watched(user.as(User), id)
end
@@ -95,31 +95,31 @@ module Invidious::Routes::Watch
if source == "youtube"
begin
- comment_html = JSON.parse(fetch_youtube_comments(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(Comments.fetch_youtube(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
rescue ex
if preferences.comments[1] == "reddit"
- comments, reddit_thread = fetch_reddit_comments(id)
- comment_html = template_reddit_comments(comments, locale)
+ comments, reddit_thread = Comments.fetch_reddit(id)
+ comment_html = Frontend::Comments.template_reddit(comments, locale)
- comment_html = fill_links(comment_html, "https", "www.reddit.com")
- comment_html = replace_links(comment_html)
+ comment_html = Comments.fill_links(comment_html, "https", "www.reddit.com")
+ comment_html = Comments.replace_links(comment_html)
end
end
elsif source == "reddit"
begin
- comments, reddit_thread = fetch_reddit_comments(id)
- comment_html = template_reddit_comments(comments, locale)
+ comments, reddit_thread = Comments.fetch_reddit(id)
+ comment_html = Frontend::Comments.template_reddit(comments, locale)
- comment_html = fill_links(comment_html, "https", "www.reddit.com")
- comment_html = replace_links(comment_html)
+ comment_html = Comments.fill_links(comment_html, "https", "www.reddit.com")
+ comment_html = Comments.replace_links(comment_html)
rescue ex
if preferences.comments[1] == "youtube"
- comment_html = JSON.parse(fetch_youtube_comments(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(Comments.fetch_youtube(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
end
end
else
- comment_html = JSON.parse(fetch_youtube_comments(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
+ comment_html = JSON.parse(Comments.fetch_youtube(id, nil, "html", locale, preferences.thin_mode, region))["contentHtml"]
end
comment_html ||= ""
@@ -259,9 +259,7 @@ module Invidious::Routes::Watch
case action
when "action_mark_watched"
- if !user.watched.includes? id
- Invidious::Database::Users.mark_watched(user, id)
- end
+ Invidious::Database::Users.mark_watched(user, id)
when "action_mark_unwatched"
Invidious::Database::Users.mark_unwatched(user, id)
else
diff --git a/src/invidious/routing.cr b/src/invidious/routing.cr
index 9e2ade3d..72ee9194 100644
--- a/src/invidious/routing.cr
+++ b/src/invidious/routing.cr
@@ -243,6 +243,7 @@ module Invidious::Routing
# Search
get "/api/v1/search", {{namespace}}::Search, :search
get "/api/v1/search/suggestions", {{namespace}}::Search, :search_suggestions
+ get "/api/v1/hashtag/:hashtag", {{namespace}}::Search, :hashtag
# Authenticated
diff --git a/src/invidious/search/processors.cr b/src/invidious/search/processors.cr
index 7e909590..25edb936 100644
--- a/src/invidious/search/processors.cr
+++ b/src/invidious/search/processors.cr
@@ -10,7 +10,7 @@ module Invidious::Search
initial_data = YoutubeAPI.search(query.text, search_params, client_config: client_config)
items, _ = extract_items(initial_data)
- return items
+ return items.reject!(Category)
end
# Search a youtube channel
@@ -32,7 +32,7 @@ module Invidious::Search
response_json = YoutubeAPI.browse(continuation)
items, _ = extract_items(response_json, "", ucid)
- return items
+ return items.reject!(Category)
end
# Search inside of user subscriptions
diff --git a/src/invidious/search/query.cr b/src/invidious/search/query.cr
index 24e79609..e38845d9 100644
--- a/src/invidious/search/query.cr
+++ b/src/invidious/search/query.cr
@@ -113,7 +113,7 @@ module Invidious::Search
case @type
when .regular?, .playlist?
- items = unnest_items(Processors.regular(self))
+ items = Processors.regular(self)
#
when .channel?
items = Processors.channel(self)
@@ -136,26 +136,5 @@ module Invidious::Search
return params
end
-
- # TODO: clean code
- private def unnest_items(all_items) : Array(SearchItem)
- items = [] of SearchItem
-
- # Light processing to flatten search results out of Categories.
- # They should ideally be supported in the future.
- all_items.each do |i|
- if i.is_a? Category
- i.contents.each do |nest_i|
- if !nest_i.is_a? Video
- items << nest_i
- end
- end
- else
- items << i
- end
- end
-
- return items
- end
end
end
diff --git a/src/invidious/trending.cr b/src/invidious/trending.cr
index 134eb437..2d9f8a83 100644
--- a/src/invidious/trending.cr
+++ b/src/invidious/trending.cr
@@ -17,7 +17,24 @@ def fetch_trending(trending_type, region, locale)
client_config = YoutubeAPI::ClientConfig.new(region: region)
initial_data = YoutubeAPI.browse("FEtrending", params: params, client_config: client_config)
- trending = extract_videos(initial_data)
- return {trending, plid}
+ items, _ = extract_items(initial_data)
+
+ extracted = [] of SearchItem
+
+ items.each do |itm|
+ if itm.is_a?(Category)
+ # Ignore the smaller categories, as they generally contain a sponsored
+ # channel, which brings a lot of noise on the trending page.
+ # See: https://github.com/iv-org/invidious/issues/2989
+ next if itm.contents.size < 24
+
+ extracted.concat extract_category(itm)
+ else
+ extracted << itm
+ end
+ end
+
+ # Deduplicate items before returning results
+ return extracted.select(SearchVideo).uniq!(&.id), plid
end
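
The final uniq!(&.id) keeps only the first occurrence of each video across the flattened categories and the plain items. An isolated illustration of that pattern, using a stand-in record rather than the real SearchVideo:

    # Stand-in for SearchVideo, reduced to the field the deduplication keys on.
    record Vid, id : String, title : String

    videos = [Vid.new("a", "kept"), Vid.new("b", "kept too"), Vid.new("a", "dropped duplicate")]
    p videos.uniq(&.id).map(&.title) # => ["kept", "kept too"]
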
diff --git a/src/invidious/user/imports.cr b/src/invidious/user/imports.cr
index 20ae0d47..e4b25156 100644
--- a/src/invidious/user/imports.cr
+++ b/src/invidious/user/imports.cr
@@ -30,6 +30,60 @@ struct Invidious::User
return subscriptions
end
+ def parse_playlist_export_csv(user : User, raw_input : String)
+ # Split the input into head and body content
+ raw_head, raw_body = raw_input.split("\n\n", limit: 2, remove_empty: true)
+
+ # Create the playlist from the head content
+ csv_head = CSV.new(raw_head, headers: true)
+ csv_head.next
+ title = csv_head[4]
+ description = csv_head[5]
+ visibility = csv_head[6]
+
+ if visibility.compare("Public", case_insensitive: true) == 0
+ privacy = PlaylistPrivacy::Public
+ else
+ privacy = PlaylistPrivacy::Private
+ end
+
+ playlist = create_playlist(title, privacy, user)
+ Invidious::Database::Playlists.update_description(playlist.id, description)
+
+ # Add each video to the playlist from the body content
+ csv_body = CSV.new(raw_body, headers: true)
+ csv_body.each do |row|
+ video_id = row[0]
+ if playlist
+ next if !video_id
+ next if video_id == "Video Id"
+
+ begin
+ video = get_video(video_id)
+ rescue ex
+ next
+ end
+
+ playlist_video = PlaylistVideo.new({
+ title: video.title,
+ id: video.id,
+ author: video.author,
+ ucid: video.ucid,
+ length_seconds: video.length_seconds,
+ published: video.published,
+ plid: playlist.id,
+ live_now: video.live_now,
+ index: Random::Secure.rand(0_i64..Int64::MAX),
+ })
+
+ Invidious::Database::PlaylistVideos.insert(playlist_video)
+ Invidious::Database::Playlists.update_video_added(playlist.id, playlist_video.index)
+ end
+ end
+
+ return playlist
+ end
+
# -------------------
# Invidious
# -------------------
@@ -48,7 +102,7 @@ struct Invidious::User
if data["watch_history"]?
user.watched += data["watch_history"].as_a.map(&.as_s)
- user.watched.uniq!
+ user.watched.reverse!.uniq!.reverse!
Invidious::Database::Users.update_watch_history(user)
end
@@ -149,6 +203,21 @@ struct Invidious::User
return true
end
+ def from_youtube_pl(user : User, body : String, filename : String, type : String) : Bool
+ extension = filename.split(".").last
+
+ if extension == "csv" || type == "text/csv"
+ playlist = parse_playlist_export_csv(user, body)
+ if playlist
+ return true
+ else
+ return false
+ end
+ else
+ return false
+ end
+ end
+
# -------------------
# Freetube
# -------------------
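
For reference, a hypothetical input accepted by parse_playlist_export_csv, reconstructed from the column indexes used above: a metadata block whose 5th, 6th and 7th columns hold title, description and visibility, a blank line, then one video id per row. The column names below are placeholders, not the exact Takeout headers.

    # Hypothetical CSV in the shape the parser expects; header names are invented.
    raw_input = <<-CSV
      Col1,Col2,Col3,Col4,Title,Description,Visibility
      PLexample,x,y,z,My playlist,Some description,Public

      Video Id,Time Added
      dQw4w9WgXcQ,2022-01-01T00:00:00+00:00
      CSV

    # parse_playlist_export_csv(user, raw_input) would create a public playlist
    # called "My playlist" and attempt to add the single listed video to it.
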
diff --git a/src/invidious/videos/description.cr b/src/invidious/videos/description.cr
new file mode 100644
index 00000000..542cb416
--- /dev/null
+++ b/src/invidious/videos/description.cr
@@ -0,0 +1,64 @@
+require "json"
+require "uri"
+
+private def copy_string(str : String::Builder, iter : Iterator, count : Int) : Int
+ copied = 0
+ while copied < count
+ cp = iter.next
+ break if cp.is_a?(Iterator::Stop)
+
+ str << cp.chr
+
+ # A codepoint from the SMP counts twice
+ copied += 1 if cp > 0xFFFF
+ copied += 1
+ end
+
+ return copied
+end
+
+def parse_description(desc, video_id : String) : String?
+ return "" if desc.nil?
+
+ content = desc["content"].as_s
+ return "" if content.empty?
+
+ commands = desc["commandRuns"]?.try &.as_a
+ return content if commands.nil?
+
+ # Not everything is stored in UTF-8 on youtube's side. The SMP codepoints
+ # (0x10000 and above) are encoded as UTF-16 surrogate pairs, which are
+  # automatically decoded by the JSON parser, but the command indexes still
+  # count SMP codepoints as two units. This means we need to count copied
+  # units in a special manner instead of doing a regular string copy.
+ iter = content.each_codepoint
+
+ index = 0
+
+ return String.build do |str|
+ commands.each do |command|
+ cmd_start = command["startIndex"].as_i
+ cmd_length = command["length"].as_i
+
+ # Copy the text chunk between this command and the previous if needed.
+ length = cmd_start - index
+ index += copy_string(str, iter, length)
+
+ # We need to copy the command's text using the iterator
+ # and the special function defined above.
+ cmd_content = String.build(cmd_length) do |str2|
+ copy_string(str2, iter, cmd_length)
+ end
+
+ link = cmd_content
+ if on_tap = command.dig?("onTap", "innertubeCommand")
+ link = parse_link_endpoint(on_tap, cmd_content, video_id)
+ end
+ str << link
+ index += cmd_length
+ end
+
+ # Copy the end of the string (past the last command).
+ remaining_length = content.size - index
+ copy_string(str, iter, remaining_length) if remaining_length > 0
+ end
+end
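
A hedged usage sketch, assuming parse_description and parse_link_endpoint from this commit are loaded: the payload below imitates an attributedDescription with a single commandRun covering the URL span (startIndex 6, length 19 in UTF-16 units), which parse_description hands off to parse_link_endpoint.

    require "json"

    # Handcrafted example payload; real attributedDescription objects carry more fields.
    desc = JSON.parse(<<-JSON)
      {
        "content": "Visit https://example.com for more",
        "commandRuns": [
          {
            "startIndex": 6,
            "length": 19,
            "onTap": {"innertubeCommand": {"urlEndpoint": {"url": "https://example.com"}}}
          }
        ]
      }
      JSON

    puts parse_description(desc, "abc123DEF45")
    # The URL span comes back wrapped in an <a href="https://example.com">...</a>
    # link; the visible text may be shortened by reduce_uri.
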
diff --git a/src/invidious/videos/parser.cr b/src/invidious/videos/parser.cr
index 13ee5f65..2e8eecc3 100644
--- a/src/invidious/videos/parser.cr
+++ b/src/invidious/videos/parser.cr
@@ -284,8 +284,10 @@ def parse_video_info(video_id : String, player_response : Hash(String, JSON::Any
description = microformat.dig?("description", "simpleText").try &.as_s || ""
short_description = player_response.dig?("videoDetails", "shortDescription")
- description_html = video_secondary_renderer.try &.dig?("description", "runs")
- .try &.as_a.try { |t| content_to_comment_html(t, video_id) }
+ # description_html = video_secondary_renderer.try &.dig?("description", "runs")
+ # .try &.as_a.try { |t| content_to_comment_html(t, video_id) }
+
+ description_html = parse_description(video_secondary_renderer.try &.dig?("attributedDescription"), video_id)
# Video metadata
diff --git a/src/invidious/views/community.ecr b/src/invidious/views/community.ecr
index 9e11d562..24efc34e 100644
--- a/src/invidious/views/community.ecr
+++ b/src/invidious/views/community.ecr
@@ -27,7 +27,7 @@
</div>
<% else %>
<div class="h-box pure-g" id="comments">
- <%= template_youtube_comments(items.not_nil!, locale, thin_mode) %>
+ <%= IV::Frontend::Comments.template_youtube(items.not_nil!, locale, thin_mode) %>
</div>
<% end %>
diff --git a/src/invidious/views/components/channel_info.ecr b/src/invidious/views/components/channel_info.ecr
index f216359f..59888760 100644
--- a/src/invidious/views/components/channel_info.ecr
+++ b/src/invidious/views/components/channel_info.ecr
@@ -1,6 +1,6 @@
<% if channel.banner %>
<div class="h-box">
- <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).request_target %>">
+ <img style="width:100%" src="/ggpht<%= URI.parse(channel.banner.not_nil!.gsub("=w1060-", "=w1280-")).request_target %>" alt="" />
</div>
<div class="h-box">
@@ -11,7 +11,7 @@
<div class="pure-g h-box">
<div class="pure-u-2-3">
<div class="channel-profile">
- <img src="/ggpht<%= channel_profile_pic %>">
+ <img src="/ggpht<%= channel_profile_pic %>" alt="" />
<span><%= author %></span><% if !channel.verified.nil? && channel.verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %>
</div>
</div>
diff --git a/src/invidious/views/components/item.ecr b/src/invidious/views/components/item.ecr
index fa12374f..7cfd38db 100644
--- a/src/invidious/views/components/item.ecr
+++ b/src/invidious/views/components/item.ecr
@@ -7,7 +7,7 @@
<a href="/channel/<%= item.ucid %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<center>
- <img loading="lazy" tabindex="-1" style="width:56.25%" src="/ggpht<%= URI.parse(item.author_thumbnail).request_target.gsub(/=s\d+/, "=s176") %>"/>
+ <img loading="lazy" tabindex="-1" style="width:56.25%" src="/ggpht<%= URI.parse(item.author_thumbnail).request_target.gsub(/=s\d+/, "=s176") %>" alt="" />
</center>
<% end %>
<p dir="auto"><%= HTML.escape(item.author) %><% if !item.author_verified.nil? && item.author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %></p>
@@ -25,7 +25,7 @@
<a style="width:100%" href="<%= url %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
- <img loading="lazy" tabindex="-1" class="thumbnail" src="<%= URI.parse(item.thumbnail || "/").request_target %>"/>
+ <img loading="lazy" tabindex="-1" class="thumbnail" src="<%= URI.parse(item.thumbnail || "/").request_target %>" alt="" />
<p class="length"><%= translate_count(locale, "generic_videos_count", item.video_count, NumberFormatting::Separator) %></p>
</div>
<% end %>
@@ -38,7 +38,7 @@
<a href="/watch?v=<%= item.id %>&list=<%= item.rdid %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
- <img loading="lazy" tabindex="-1" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+ <img loading="lazy" tabindex="-1" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg" alt="" />
<% if item.length_seconds != 0 %>
<p class="length"><%= recode_length_seconds(item.length_seconds) %></p>
<% end %>
@@ -58,7 +58,7 @@
<a style="width:100%" href="/watch?v=<%= item.id %>&list=<%= item.plid %>&index=<%= item.index %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
- <img loading="lazy" tabindex="-1" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+ <img loading="lazy" tabindex="-1" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg" alt="" />
<% if plid_form = env.get?("remove_playlist_items") %>
<form data-onsubmit="return_false" action="/playlist_ajax?action_remove_video=1&set_video_id=<%= item.index %>&playlist_id=<%= plid_form %>&referer=<%= env.get("current_page") %>" method="post">
@@ -112,7 +112,7 @@
<a style="width:100%" href="/watch?v=<%= item.id %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
- <img loading="lazy" tabindex="-1" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg"/>
+ <img loading="lazy" tabindex="-1" class="thumbnail" src="/vi/<%= item.id %>/mqdefault.jpg" alt="" />
<% if env.get? "show_watched" %>
<form data-onsubmit="return_false" action="/watch_ajax?action_mark_watched=1&id=<%= item.id %>&referer=<%= env.get("current_page") %>" method="post">
<input type="hidden" name="csrf_token" value="<%= HTML.escape(env.get?("csrf_token").try &.as(String) || "") %>">
diff --git a/src/invidious/views/feeds/history.ecr b/src/invidious/views/feeds/history.ecr
index 471d21db..2234b297 100644
--- a/src/invidious/views/feeds/history.ecr
+++ b/src/invidious/views/feeds/history.ecr
@@ -34,7 +34,7 @@
<a style="width:100%" href="/watch?v=<%= item %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
- <img class="thumbnail" src="/vi/<%= item %>/mqdefault.jpg"/>
+ <img class="thumbnail" src="/vi/<%= item %>/mqdefault.jpg" alt="" />
<form data-onsubmit="return_false" action="/watch_ajax?action_mark_unwatched=1&id=<%= item %>&referer=<%= env.get("current_page") %>" method="post">
<input type="hidden" name="csrf_token" value="<%= URI.encode_www_form(env.get?("csrf_token").try &.as(String) || "") %>">
<p class="watched">
diff --git a/src/invidious/views/feeds/playlists.ecr b/src/invidious/views/feeds/playlists.ecr
index e52a7707..2a4b6edd 100644
--- a/src/invidious/views/feeds/playlists.ecr
+++ b/src/invidious/views/feeds/playlists.ecr
@@ -5,12 +5,19 @@
<%= rendered "components/feed_menu" %>
<div class="pure-g h-box">
- <div class="pure-u-2-3">
+ <div class="pure-u-1-3">
<h3><%= translate(locale, "user_created_playlists", %(<span id="count">#{items_created.size}</span>)) %></h3>
</div>
- <div class="pure-u-1-3" style="text-align:right">
- <h3>
- <a href="/create_playlist?referer=<%= URI.encode_www_form(referer) %>"><%= translate(locale, "Create playlist") %></a>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:center">
+ <a href="/create_playlist?referer=<%= URI.encode_www_form("/feed/playlists") %>"><%= translate(locale, "Create playlist") %></a>
+ </h3>
+ </div>
+ <div class="pure-u-1-3">
+ <h3 style="text-align:right">
+ <a href="/data_control?referer=<%= URI.encode_www_form("/feed/playlists") %>">
+ <%= translate(locale, "Import/export") %>
+ </a>
</h3>
</div>
</div>
diff --git a/src/invidious/views/user/data_control.ecr b/src/invidious/views/user/data_control.ecr
index a451159f..27654b40 100644
--- a/src/invidious/views/user/data_control.ecr
+++ b/src/invidious/views/user/data_control.ecr
@@ -8,7 +8,7 @@
<legend><%= translate(locale, "Import") %></legend>
<div class="pure-control-group">
- <label for="import_youtube"><%= translate(locale, "Import Invidious data") %></label>
+ <label for="import_invidious"><%= translate(locale, "Import Invidious data") %></label>
<input type="file" id="import_invidious" name="import_invidious">
</div>
@@ -22,6 +22,11 @@
</div>
<div class="pure-control-group">
+ <label for="import_youtube_pl"><%= translate(locale, "Import YouTube playlist (.csv)") %></label>
+ <input type="file" id="import_youtube_pl" name="import_youtube_pl">
+ </div>
+
+ <div class="pure-control-group">
<label for="import_freetube"><%= translate(locale, "Import FreeTube subscriptions (.db)") %></label>
<input type="file" id="import_freetube" name="import_freetube">
</div>
diff --git a/src/invidious/views/watch.ecr b/src/invidious/views/watch.ecr
index a3ec94e8..5b3190f3 100644
--- a/src/invidious/views/watch.ecr
+++ b/src/invidious/views/watch.ecr
@@ -208,7 +208,7 @@ we're going to need to do it here in order to allow for translations.
<a href="/channel/<%= video.ucid %>" style="display:block;width:fit-content;width:-moz-fit-content">
<div class="channel-profile">
<% if !video.author_thumbnail.empty? %>
- <img src="/ggpht<%= URI.parse(video.author_thumbnail).request_target %>">
+ <img src="/ggpht<%= URI.parse(video.author_thumbnail).request_target %>" alt="" />
<% end %>
<span id="channel-name"><%= author %><% if !video.author_verified.nil? && video.author_verified %>&nbsp;<i class="icon ion ion-md-checkmark-circle"></i><% end %></span>
</div>
@@ -298,7 +298,7 @@ we're going to need to do it here in order to allow for translations.
<a href="/watch?v=<%= rv["id"] %>&listen=<%= params.listen %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>
<div class="thumbnail">
- <img loading="lazy" class="thumbnail" src="/vi/<%= rv["id"] %>/mqdefault.jpg">
+ <img loading="lazy" class="thumbnail" src="/vi/<%= rv["id"] %>/mqdefault.jpg" alt="" />
<p class="length"><%= recode_length_seconds(rv["length_seconds"]?.try &.to_i? || 0) %></p>
</div>
<% end %>
diff --git a/src/invidious/yt_backend/extractors.cr b/src/invidious/yt_backend/extractors.cr
index 1a37d606..6686e6e7 100644
--- a/src/invidious/yt_backend/extractors.cr
+++ b/src/invidious/yt_backend/extractors.cr
@@ -268,7 +268,7 @@ private module Parsers
end
private def self.parse(item_contents, author_fallback)
- title = item_contents["title"]["simpleText"]?.try &.as_s || ""
+ title = extract_text(item_contents["title"]) || ""
plid = item_contents["playlistId"]?.try &.as_s || ""
video_count = HelperExtractors.get_video_count(item_contents)
@@ -381,7 +381,7 @@ private module Parsers
# Parses an InnerTube itemSectionRenderer into a SearchVideo.
# Returns nil when the given object isn't a ItemSectionRenderer
#
- # A itemSectionRenderer seems to be a simple wrapper for a videoRenderer, used
+ # A itemSectionRenderer seems to be a simple wrapper for a videoRenderer or a playlistRenderer, used
# by the result page for channel searches. It is located inside a continuationItems
   # container. It is very similar to RichItemRendererParser.
#
@@ -394,6 +394,8 @@ private module Parsers
private def self.parse(item_contents, author_fallback)
child = VideoRendererParser.process(item_contents, author_fallback)
+ child ||= PlaylistRendererParser.process(item_contents, author_fallback)
+
return child
end
@@ -448,44 +450,43 @@ private module Parsers
"overlay", "reelPlayerOverlayRenderer"
)
- # Sometimes, the "reelPlayerOverlayRenderer" object is missing the
- # important part of the response. We use this exception to tell
- # the calling function to fetch the content again.
- if !reel_player_overlay.as_h.has_key?("reelPlayerHeaderSupportedRenderers")
- raise RetryOnceException.new
- end
-
- video_details_container = reel_player_overlay.dig(
- "reelPlayerHeaderSupportedRenderers",
- "reelPlayerHeaderRenderer"
- )
+ if video_details_container = reel_player_overlay.dig?(
+ "reelPlayerHeaderSupportedRenderers",
+ "reelPlayerHeaderRenderer"
+ )
+ # Author infos
- # Author infos
+ author = video_details_container
+ .dig?("channelTitleText", "runs", 0, "text")
+ .try &.as_s || author_fallback.name
- author = video_details_container
- .dig?("channelTitleText", "runs", 0, "text")
- .try &.as_s || author_fallback.name
+ ucid = video_details_container
+ .dig?("channelNavigationEndpoint", "browseEndpoint", "browseId")
+ .try &.as_s || author_fallback.id
- ucid = video_details_container
- .dig?("channelNavigationEndpoint", "browseEndpoint", "browseId")
- .try &.as_s || author_fallback.id
+ # Title & publication date
- # Title & publication date
+ title = video_details_container.dig?("reelTitleText")
+ .try { |t| extract_text(t) } || ""
- title = video_details_container.dig?("reelTitleText")
- .try { |t| extract_text(t) } || ""
-
- published = video_details_container
- .dig?("timestampText", "simpleText")
- .try { |t| decode_date(t.as_s) } || Time.utc
+ published = video_details_container
+ .dig?("timestampText", "simpleText")
+ .try { |t| decode_date(t.as_s) } || Time.utc
+ # View count
+ view_count_text = video_details_container.dig?("viewCountText", "simpleText")
+ else
+ author = author_fallback.name
+ ucid = author_fallback.id
+ published = Time.utc
+ title = item_contents.dig?("headline", "simpleText").try &.as_s || ""
+ end
# View count
# View count used to be in the reelWatchEndpoint, but that changed?
- view_count_text = item_contents.dig?("viewCountText", "simpleText")
- view_count_text ||= video_details_container.dig?("viewCountText", "simpleText")
+ view_count_text ||= item_contents.dig?("viewCountText", "simpleText")
- view_count = view_count_text.try &.as_s.gsub(/\D+/, "").to_i64? || 0_i64
+ view_count = short_text_to_number(view_count_text.try &.as_s || "0")
# Duration
diff --git a/src/invidious/yt_backend/extractors_utils.cr b/src/invidious/yt_backend/extractors_utils.cr
index 0cb3c079..11d95958 100644
--- a/src/invidious/yt_backend/extractors_utils.cr
+++ b/src/invidious/yt_backend/extractors_utils.cr
@@ -68,19 +68,17 @@ rescue ex
return false
end
-def extract_videos(initial_data : Hash(String, JSON::Any), author_fallback : String? = nil, author_id_fallback : String? = nil) : Array(SearchVideo)
- extracted, _ = extract_items(initial_data, author_fallback, author_id_fallback)
+# This function extracts SearchVideo items from a Category.
+# Categories are commonly returned in search results and trending pages.
+def extract_category(category : Category) : Array(SearchVideo)
+ return category.contents.select(SearchVideo)
+end
- target = [] of (SearchItem | Continuation)
- extracted.each do |i|
- if i.is_a?(Category)
- i.contents.each { |cate_i| target << cate_i if !cate_i.is_a? Video }
- else
- target << i
- end
+# :ditto:
+def extract_category(category : Category, &)
+ category.contents.select(SearchVideo).each do |item|
+ yield item
end
-
- return target.select(SearchVideo)
end
def extract_selected_tab(tabs)