-rw-r--r--  src/invidious/channels.cr         43
-rw-r--r--  src/invidious/helpers/helpers.cr   7
2 files changed, 36 insertions, 14 deletions
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 9986fe1b..b9808d98 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -233,7 +233,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
videos = [] of SearchVideo
begin
- initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+ initial_data = JSON.parse(response.body)
raise InfoException.new("Could not extract channel JSON") if !initial_data
LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
@@ -305,7 +305,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
loop do
response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
- initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+ initial_data = JSON.parse(response.body)
raise InfoException.new("Could not extract channel JSON") if !initial_data
videos = extract_videos(initial_data.as_h, author, ucid)
@@ -388,7 +388,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
return items, continuation
end
-def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
+def produce_channel_videos_continuation(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
object = {
"80226972:embedded" => {
"2:string" => ucid,
@@ -444,6 +444,11 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
.try { |i| Base64.urlsafe_encode(i) }
.try { |i| URI.encode_www_form(i) }
+ return continuation
+end
+
+def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "newest", v2 = false)
+ continuation = produce_channel_videos_continuation(ucid, page, auto_generated, sort_by, v2)
return "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
end
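
A minimal usage sketch of the split above (the UCID is a placeholder, not from the patch): produce_channel_videos_url is now a thin wrapper around produce_channel_videos_continuation, so both helpers stay in sync.

    ucid = "UCXXXXXXXXXXXXXXXXXXXXXX" # hypothetical channel ID for illustration
    continuation = produce_channel_videos_continuation(ucid, 1, sort_by: "newest", v2: true)
    url = produce_channel_videos_url(ucid, 1, sort_by: "newest", v2: true)
    # url == "/browse_ajax?continuation=#{continuation}&gl=US&hl=en"
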
@@ -932,9 +937,27 @@ def get_about_info(ucid, locale)
})
end
-def get_channel_videos_response(ucid, page = 1, auto_generated = nil, sort_by = "newest")
- url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated, sort_by: sort_by, v2: true)
- return YT_POOL.client &.get(url)
+def get_channel_videos_response(ucid, page = 1, auto_generated = nil, sort_by = "newest", youtubei_browse = true)
+ if youtubei_browse
+ continuation = produce_channel_videos_continuation(ucid, page, auto_generated: auto_generated, sort_by: sort_by, v2: true)
+ data = {
+ "context": {
+ "client": {
+ "clientName": "WEB",
+ "clientVersion": "2.20201021.03.00",
+ },
+ },
+ "continuation": continuation,
+ }.to_json
+ return YT_POOL.client &.post(
+ "/youtubei/v1/browse?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8",
+ headers: HTTP::Headers{"content-type" => "application/json"},
+ body: data
+ )
+ else
+ url = produce_channel_videos_url(ucid, page, auto_generated: auto_generated, sort_by: sort_by, v2: true)
+ return YT_POOL.client &.get(url)
+ end
end
def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
@@ -942,7 +965,7 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
2.times do |i|
response = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
- initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+ initial_data = JSON.parse(response.body)
break if !initial_data
videos.concat extract_videos(initial_data.as_h, author, ucid)
end
@@ -951,10 +974,10 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
end
def get_latest_videos(ucid)
- response = get_channel_videos_response(ucid, 1)
- initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+ response = get_channel_videos_response(ucid)
+ initial_data = JSON.parse(response.body)
return [] of SearchVideo if !initial_data
- author = initial_data["response"]?.try &.["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
+ author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
items = extract_videos(initial_data.as_h, author, ucid)
return items
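
A hedged usage sketch of the new response handling (the UCID is a placeholder; get_channel_videos_response and extract_videos are the helpers patched above): the youtubei/v1/browse endpoint returns the initial data object directly, so the body is parsed as-is instead of searching a top-level array for a "response" entry.

    ucid = "UCXXXXXXXXXXXXXXXXXXXXXX" # hypothetical channel ID for illustration
    response = get_channel_videos_response(ucid, 1, auto_generated: false, sort_by: "newest")
    initial_data = JSON.parse(response.body) # browse API body is the initial data itself
    author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
    videos = extract_videos(initial_data.as_h, author, ucid)
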
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index 6452b68c..5d127e1a 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -383,10 +383,9 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
items = [] of SearchItem
channel_v2_response = initial_data
- .try &.["response"]?
- .try &.["continuationContents"]?
- .try &.["gridContinuation"]?
- .try &.["items"]?
+ .try &.["continuationContents"]?
+ .try &.["gridContinuation"]?
+ .try &.["items"]?
if channel_v2_response
channel_v2_response.try &.as_a.each { |item|
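
A sketch of the lookup this hunk now performs, run against a hypothetical minimal payload (the JSON below is illustrative, not captured from YouTube): with the browse API, continuationContents sits at the top level of the parsed body, so the leading ["response"]? step is dropped.

    # hypothetical payload mirroring the browse-API continuation shape
    initial_data = JSON.parse(%({"continuationContents": {"gridContinuation": {"items": []}}}))
    items = initial_data
      .try &.["continuationContents"]?
      .try &.["gridContinuation"]?
      .try &.["items"]?
    # items is the (empty) grid items array; under /browse_ajax it was nested one level deeper, under "response"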