author     Samantaz Fox <coding@samantaz.fr>   2021-04-01 02:36:43 +0000
committer  Samantaz Fox <coding@samantaz.fr>   2021-05-24 13:19:28 +0200
commit     43bd331e48ad1a19cd3c7a6d5beb72e3127c5edc
tree       0904a996afbf1cb1e8c2fbc51d293844108f3846
parent     8bbb016fa40d4ee60aeac2f3e65349be0f71b7d6
Multiple youtube_api.cr helper fixes
Add documentation
Bump web client version string
Add charset=UTF-8 to the 'content-type' header
Parse JSON and return it as a Hash
Handle API error messages
Diffstat (limited to 'src')
-rw-r--r--   src/invidious/channels.cr              40
-rw-r--r--   src/invidious/helpers/youtube_api.cr   29
-rw-r--r--   src/invidious/playlists.cr              2
-rw-r--r--   src/invidious/search.cr                 3
4 files changed, 36 insertions, 38 deletions
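Note: with this change, request_youtube_api_browse returns the already-parsed response as a Hash and raises InfoException when the API reports an error, so callers no longer run JSON.parse themselves. A minimal sketch of the new calling convention, assuming the repository's existing helpers (extract_videos, LOGGER, InfoException) and a continuation token already in hand; the surrounding setup is illustrative and not part of this diff:

    begin
      # The helper now returns a parsed Hash(String, JSON::Any) instead of a raw body string
      initial_data = request_youtube_api_browse(continuation)
      videos = extract_videos(initial_data, author, ucid)
    rescue ex : InfoException
      # API-level errors (an "error" key in the response body) are raised by the helper
      LOGGER.trace("youtubei browse request failed: #{ex.message}")
      videos = [] of SearchVideo
    end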
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 3109b508..bbef3d4f 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -229,22 +229,8 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
page = 1
LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
- response_body = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
-
- videos = [] of SearchVideo
- begin
- initial_data = JSON.parse(response_body)
- raise InfoException.new("Could not extract channel JSON") if !initial_data
-
- LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
- videos = extract_videos(initial_data.as_h, author, ucid)
- rescue ex
- if response_body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
- response_body.includes?("https://www.google.com/sorry/index")
- raise InfoException.new("Could not extract channel info. Instance is likely blocked.")
- end
- raise ex
- end
+ initial_data = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
+ videos = extract_videos(initial_data, author, ucid)
LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
rss.xpath_nodes("//feed/entry").each do |entry|
@@ -304,10 +290,8 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
ids = [] of String
loop do
- response_body = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
- initial_data = JSON.parse(response_body)
- raise InfoException.new("Could not extract channel JSON") if !initial_data
- videos = extract_videos(initial_data.as_h, author, ucid)
+ initial_data = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
+ videos = extract_videos(initial_data, author, ucid)
count = videos.size
videos = videos.map { |video| ChannelVideo.new({
@@ -358,8 +342,7 @@ end
def fetch_channel_playlists(ucid, author, continuation, sort_by)
if continuation
response_json = request_youtube_api_browse(continuation)
- result = JSON.parse(response_json)
- continuationItems = result["onResponseReceivedActions"]?
+ continuationItems = response_json["onResponseReceivedActions"]?
.try &.[0]["appendContinuationItemsAction"]["continuationItems"]
return [] of SearchItem, nil if !continuationItems
@@ -964,21 +947,16 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
videos = [] of SearchVideo
2.times do |i|
- response_json = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
- initial_data = JSON.parse(response_json)
- break if !initial_data
- videos.concat extract_videos(initial_data.as_h, author, ucid)
+ initial_data = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
+ videos.concat extract_videos(initial_data, author, ucid)
end
return videos.size, videos
end
def get_latest_videos(ucid)
- response_json = get_channel_videos_response(ucid)
- initial_data = JSON.parse(response_json)
- return [] of SearchVideo if !initial_data
+ initial_data = get_channel_videos_response(ucid)
author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
- items = extract_videos(initial_data.as_h, author, ucid)
- return items
+ return extract_videos(initial_data, author, ucid)
end
diff --git a/src/invidious/helpers/youtube_api.cr b/src/invidious/helpers/youtube_api.cr
index 30413532..84e0c38f 100644
--- a/src/invidious/helpers/youtube_api.cr
+++ b/src/invidious/helpers/youtube_api.cr
@@ -4,8 +4,18 @@
# Hard-coded constants required by the API
HARDCODED_API_KEY = "AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8"
-HARDCODED_CLIENT_VERS = "2.20210318.08.00"
+HARDCODED_CLIENT_VERS = "2.20210330.08.00"
+####################################################################
+# request_youtube_api_browse(continuation)
+#
+# Requests the youtubei/vi/browse endpoint with the required headers
+# to get JSON in en-US (english US).
+#
+# The requested data is a continuation token (ctoken). Depending on
+# this token's contents, the returned data can be comments, playlist
+# videos, search results, channel community tab, ...
+#
def request_youtube_api_browse(continuation)
# JSON Request data, required by the API
data = {
@@ -20,12 +30,23 @@ def request_youtube_api_browse(continuation)
"continuation": continuation,
}
- # Send the POST request and return result
+ # Send the POST request and parse result
response = YT_POOL.client &.post(
"/youtubei/v1/browse?key=#{HARDCODED_API_KEY}",
- headers: HTTP::Headers{"content-type" => "application/json"},
+ headers: HTTP::Headers{"content-type" => "application/json; charset=UTF-8"},
body: data.to_json
)
- return response.body
+ initial_data = JSON.parse(response.body).as_h
+
+ # Error handling
+ if initial_data.has_key?("error")
+ code = initial_data["error"]["code"]
+ message = initial_data["error"]["message"].to_s.sub(/(\\n)+\^$/, "")
+
+ raise InfoException.new("Could not extract JSON. Youtube API returned \
+ error #{code} with message:<br>\"#{message}\"")
+ end
+
+ return initial_data
end
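The regex in the new error path strips the trailing literal "\n" escape sequences and caret that the API appends to some parse-error messages before the message is displayed. An illustrative example (the sample message is invented, not taken from a real API response):

    raw = "Invalid JSON payload received.\\n\\n^"
    puts raw.sub(/(\\n)+\^$/, "")  # => Invalid JSON payload received.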
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index 073a9986..150f1c15 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -451,7 +451,7 @@ def get_playlist_videos(db, playlist, offset, locale = nil, continuation = nil)
offset = (offset / 100).to_i64 * 100_i64
ctoken = produce_playlist_continuation(playlist.id, offset)
- initial_data = JSON.parse(request_youtube_api_browse(ctoken)).as_h
+ initial_data = request_youtube_api_browse(ctoken)
else
response = YT_POOL.client &.get("/playlist?list=#{playlist.id}&gl=US&hl=en")
initial_data = extract_initial_data(response.body)
diff --git a/src/invidious/search.cr b/src/invidious/search.cr
index 4b216613..7c9c389e 100644
--- a/src/invidious/search.cr
+++ b/src/invidious/search.cr
@@ -246,8 +246,7 @@ def channel_search(query, page, channel)
continuation = produce_channel_search_continuation(ucid, query, page)
response_json = request_youtube_api_browse(continuation)
- result = JSON.parse(response_json)
- continuationItems = result["onResponseReceivedActions"]?
+ continuationItems = response_json["onResponseReceivedActions"]?
.try &.[0]["appendContinuationItemsAction"]["continuationItems"]
return 0, [] of SearchItem if !continuationItems