summary refs log tree commit diff stats
diff options
context:
space:
mode:
author Omar Roth <omarroth@hotmail.com> 2019-02-15 17:28:54 -0600
committer Omar Roth <omarroth@hotmail.com> 2019-02-15 17:28:54 -0600
commit 27663b10a256cd3ddb31b3b5e013daeaf577155f (patch)
tree bd4e3c535ec2ed279065ad59d6fbc1703c86603a
parent c099a5ad2e42616ee96a5da41cf27083606ac692 (diff)
download invidious-27663b10a256cd3ddb31b3b5e013daeaf577155f.tar.gz
download invidious-27663b10a256cd3ddb31b3b5e013daeaf577155f.tar.bz2
download invidious-27663b10a256cd3ddb31b3b5e013daeaf577155f.zip
Add minor API fixes
-rw-r--r-- src/invidious.cr                    5
-rw-r--r-- src/invidious/channels.cr         128
-rw-r--r-- src/invidious/helpers/helpers.cr  113
-rw-r--r-- src/invidious/mixes.cr              5
-rw-r--r-- src/invidious/playlists.cr        111
5 files changed, 235 insertions, 127 deletions
diff --git a/src/invidious.cr b/src/invidious.cr
index 93f63dfe..222b82ae 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -3031,7 +3031,8 @@ end
ucid = env.params.url["ucid"]
page = env.params.query["page"]?.try &.to_i?
page ||= 1
- sort_by = env.params.query["sort_by"]?.try &.downcase
+ sort_by = env.params.query["sort"]?.try &.downcase
+ sort_by ||= env.params.query["sort_by"]?.try &.downcase
sort_by ||= "newest"
begin
@@ -3436,7 +3437,7 @@ get "/api/v1/mixes/:rdid" do |env|
rdid = env.params.url["rdid"]
continuation = env.params.query["continuation"]?
- continuation ||= rdid.lchop("RD")
+ continuation ||= rdid.lchop("RD")[0, 11]
format = env.params.query["format"]?
format ||= "json"
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index ccaf2487..b6692919 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -260,6 +260,132 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
return url
end
+def produce_channel_playlists_url(ucid, cursor, sort = "newest", auto_generated = false)
+ if !auto_generated
+ cursor = Base64.urlsafe_encode(cursor, false)
+ end
+
+ meta = IO::Memory.new
+
+ if auto_generated
+ meta.write(Bytes[0x08, 0x0a])
+ end
+
+ meta.write(Bytes[0x12, 0x09])
+ meta.print("playlists")
+
+ if auto_generated
+ meta.write(Bytes[0x20, 0x32])
+ else
+ # TODO: Look at 0x01, 0x00
+ case sort
+ when "oldest", "oldest_created"
+ meta.write(Bytes[0x18, 0x02])
+ when "newest", "newest_created"
+ meta.write(Bytes[0x18, 0x03])
+ when "last", "last_added"
+ meta.write(Bytes[0x18, 0x04])
+ end
+
+ meta.write(Bytes[0x20, 0x01])
+ end
+
+ meta.write(Bytes[0x30, 0x02])
+ meta.write(Bytes[0x38, 0x01])
+ meta.write(Bytes[0x60, 0x01])
+ meta.write(Bytes[0x6a, 0x00])
+
+ meta.write(Bytes[0x7a, cursor.size])
+ meta.print(cursor)
+
+ meta.write(Bytes[0xb8, 0x01, 0x00])
+
+ meta.rewind
+ meta = Base64.urlsafe_encode(meta.to_slice)
+ meta = URI.escape(meta)
+
+ continuation = IO::Memory.new
+ continuation.write(Bytes[0x12, ucid.size])
+ continuation.print(ucid)
+
+ continuation.write(Bytes[0x1a])
+ continuation.write(write_var_int(meta.size))
+ continuation.print(meta)
+
+ continuation.rewind
+ continuation = continuation.gets_to_end
+
+ wrapper = IO::Memory.new
+ wrapper.write(Bytes[0xe2, 0xa9, 0x85, 0xb2, 0x02])
+ wrapper.write(write_var_int(continuation.size))
+ wrapper.print(continuation)
+ wrapper.rewind
+
+ wrapper = Base64.urlsafe_encode(wrapper.to_slice)
+ wrapper = URI.escape(wrapper)
+
+ url = "/browse_ajax?continuation=#{wrapper}&gl=US&hl=en"
+
+ return url
+end
+
+def extract_channel_playlists_cursor(url, auto_generated)
+ wrapper = HTTP::Params.parse(URI.parse(url).query.not_nil!)["continuation"]
+
+ wrapper = URI.unescape(wrapper)
+ wrapper = Base64.decode(wrapper)
+
+ # 0xe2 0xa9 0x85 0xb2 0x02
+ wrapper += 5
+
+ continuation_size = read_var_int(wrapper[0, 4])
+ wrapper += write_var_int(continuation_size).size
+ continuation = wrapper[0, continuation_size]
+
+ # 0x12
+ continuation += 1
+ ucid_size = continuation[0]
+ continuation += 1
+ ucid = continuation[0, ucid_size]
+ continuation += ucid_size
+
+ # 0x1a
+ continuation += 1
+ meta_size = read_var_int(continuation[0, 4])
+ continuation += write_var_int(meta_size).size
+ meta = continuation[0, meta_size]
+ continuation += meta_size
+
+ meta = String.new(meta)
+ meta = URI.unescape(meta)
+ meta = Base64.decode(meta)
+
+ # 0x12 0x09 playlists
+ meta += 11
+
+ until meta[0] == 0x7a
+ tag = read_var_int(meta[0, 4])
+ meta += write_var_int(tag).size
+ value = meta[0]
+ meta += 1
+ end
+
+ # 0x7a
+ meta += 1
+ cursor_size = meta[0]
+ meta += 1
+ cursor = meta[0, cursor_size]
+
+ cursor = String.new(cursor)
+
+ if !auto_generated
+ cursor = URI.unescape(cursor)
+ cursor = Base64.decode_string(cursor)
+ end
+
+ return cursor
+end
+
def get_about_info(ucid, locale)
client = make_client(YT_URL)
@@ -290,7 +416,7 @@ def get_about_info(ucid, locale)
sub_count ||= 0
author = about.xpath_node(%q(//span[contains(@class,"qualified-channel-title-text")]/a)).not_nil!.content
- ucid = about.xpath_node(%q(//link[@rel="canonical"])).not_nil!["href"].split("/")[-1]
+ ucid = about.xpath_node(%q(//meta[@itemprop="channelId"])).not_nil!["content"]
# Auto-generated channels
# https://support.google.com/youtube/answer/2579942
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index 942757c3..45ebc4dd 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -166,35 +166,33 @@ def extract_videos(nodeset, ucid = nil)
videos.map { |video| video.as(SearchVideo) }
end
-def extract_items(nodeset, ucid = nil)
+def extract_items(nodeset, ucid = nil, author_name = nil)
# TODO: Make this a 'common', so it makes more sense to be used here
items = [] of SearchItem
nodeset.each do |node|
- anchor = node.xpath_node(%q(.//h3[contains(@class,"yt-lockup-title")]/a))
+ anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
if !anchor
next
end
+ title = anchor.content.strip
+ id = anchor["href"]
if anchor["href"].starts_with? "https://www.googleadservices.com"
next
end
anchor = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-byline")]/a))
- if !anchor
- author = ""
- author_id = ""
- else
+ if anchor
author = anchor.content.strip
author_id = anchor["href"].split("/")[-1]
end
- anchor = node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
- if !anchor
- next
- end
- title = anchor.content.strip
- id = anchor["href"]
+ author ||= author_name
+ author_id ||= ucid
+
+ author ||= ""
+ author_id ||= ""
description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
description_html, description = html_to_content(description_html)
@@ -354,3 +352,94 @@ def extract_items(nodeset, ucid = nil)
return items
end
+
+def extract_shelf_items(nodeset, ucid = nil, author_name = nil)
+ items = [] of SearchPlaylist
+
+ nodeset.each do |shelf|
+ shelf_anchor = shelf.xpath_node(%q(.//h2[contains(@class, "branded-page-module-title")]))
+
+ if !shelf_anchor
+ next
+ end
+
+ title = shelf_anchor.xpath_node(%q(.//span[contains(@class, "branded-page-module-title-text")]))
+ if title
+ title = title.content.strip
+ end
+ title ||= ""
+
+ id = shelf_anchor.xpath_node(%q(.//a)).try &.["href"]
+ if !id
+ next
+ end
+
+ is_playlist = false
+ videos = [] of SearchPlaylistVideo
+
+ shelf.xpath_nodes(%q(.//ul[contains(@class, "yt-uix-shelfslider-list")]/li)).each do |child_node|
+ type = child_node.xpath_node(%q(./div))
+ if !type
+ next
+ end
+
+ case type["class"]
+ when .includes? "yt-lockup-video"
+ is_playlist = true
+
+ anchor = child_node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
+ if anchor
+ video_title = anchor.content.strip
+ video_id = HTTP::Params.parse(URI.parse(anchor["href"]).query.not_nil!)["v"]
+ end
+ video_title ||= ""
+ video_id ||= ""
+
+ anchor = child_node.xpath_node(%q(.//span[@class="video-time"]))
+ if anchor
+ length_seconds = decode_length_seconds(anchor.content)
+ end
+ length_seconds ||= 0
+
+ videos << SearchPlaylistVideo.new(
+ video_title,
+ video_id,
+ length_seconds
+ )
+ when .includes? "yt-lockup-playlist"
+ anchor = child_node.xpath_node(%q(.//h3[contains(@class, "yt-lockup-title")]/a))
+ if anchor
+ playlist_title = anchor.content.strip
+ params = HTTP::Params.parse(URI.parse(anchor["href"]).query.not_nil!)
+ plid = params["list"]
+ end
+ playlist_title ||= ""
+ plid ||= ""
+
+ items << SearchPlaylist.new(
+ playlist_title,
+ plid,
+ author_name,
+ ucid,
+ 50,
+ Array(SearchPlaylistVideo).new
+ )
+ end
+ end
+
+ if is_playlist
+ plid = HTTP::Params.parse(URI.parse(id).query.not_nil!)["list"]
+
+ items << SearchPlaylist.new(
+ title,
+ plid,
+ author_name,
+ ucid,
+ videos.size,
+ videos
+ )
+ end
+ end
+
+ return items
+end
diff --git a/src/invidious/mixes.cr b/src/invidious/mixes.cr
index a56f468a..a3ada869 100644
--- a/src/invidious/mixes.cr
+++ b/src/invidious/mixes.cr
@@ -52,7 +52,10 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
item = item["playlistPanelVideoRenderer"]
id = item["videoId"].as_s
- title = item["title"]["simpleText"].as_s
+ title = item["title"]?.try &.["simpleText"].as_s
+ if !title
+ next
+ end
author = item["longBylineText"]["runs"][0]["text"].as_s
ucid = item["longBylineText"]["runs"][0]["navigationEndpoint"]["browseEndpoint"]["browseId"].as_s
length_seconds = decode_length_seconds(item["lengthText"]["simpleText"].as_s)
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index 220a0ef7..28f2e4ce 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -161,117 +161,6 @@ def produce_playlist_url(id, index)
return url
end
-def produce_channel_playlists_url(ucid, cursor, sort = "newest")
- cursor = Base64.urlsafe_encode(cursor, false)
-
- meta = IO::Memory.new
- meta.write(Bytes[0x12, 0x09])
- meta.print("playlists")
-
- # TODO: Look at 0x01, 0x00
- case sort
- when "oldest", "oldest_created"
- meta.write(Bytes[0x18, 0x02])
- when "newest", "newest_created"
- meta.write(Bytes[0x18, 0x03])
- when "last", "last_added"
- meta.write(Bytes[0x18, 0x04])
- end
-
- meta.write(Bytes[0x20, 0x01])
- meta.write(Bytes[0x30, 0x02])
- meta.write(Bytes[0x38, 0x01])
- meta.write(Bytes[0x60, 0x01])
- meta.write(Bytes[0x6a, 0x00])
-
- meta.write(Bytes[0x7a, cursor.size])
- meta.print(cursor)
-
- meta.write(Bytes[0xb8, 0x01, 0x00])
-
- meta.rewind
- meta = Base64.urlsafe_encode(meta.to_slice)
- meta = URI.escape(meta)
-
- continuation = IO::Memory.new
- continuation.write(Bytes[0x12, ucid.size])
- continuation.print(ucid)
-
- continuation.write(Bytes[0x1a])
- continuation.write(write_var_int(meta.size))
- continuation.print(meta)
-
- continuation.rewind
- continuation = continuation.gets_to_end
-
- wrapper = IO::Memory.new
- wrapper.write(Bytes[0xe2, 0xa9, 0x85, 0xb2, 0x02])
- wrapper.write(write_var_int(continuation.size))
- wrapper.print(continuation)
- wrapper.rewind
-
- wrapper = Base64.urlsafe_encode(wrapper.to_slice)
- wrapper = URI.escape(wrapper)
-
- url = "/browse_ajax?continuation=#{wrapper}&gl=US&hl=en"
-
- return url
-end
-
-def extract_channel_playlists_cursor(url)
- wrapper = HTTP::Params.parse(URI.parse(url).query.not_nil!)["continuation"]
-
- wrapper = URI.unescape(wrapper)
- wrapper = Base64.decode(wrapper)
-
- # 0xe2 0xa9 0x85 0xb2 0x02
- wrapper += 5
-
- continuation_size = read_var_int(wrapper[0, 4])
- wrapper += write_var_int(continuation_size).size
- continuation = wrapper[0, continuation_size]
-
- # 0x12
- continuation += 1
- ucid_size = continuation[0]
- continuation += 1
- ucid = continuation[0, ucid_size]
- continuation += ucid_size
-
- # 0x1a
- continuation += 1
- meta_size = read_var_int(continuation[0, 4])
- continuation += write_var_int(meta_size).size
- meta = continuation[0, meta_size]
- continuation += meta_size
-
- meta = String.new(meta)
- meta = URI.unescape(meta)
- meta = Base64.decode(meta)
-
- # 0x12 0x09 playlists
- meta += 11
-
- until meta[0] == 0x7a
- tag = read_var_int(meta[0, 4])
- meta += write_var_int(tag).size
- value = meta[0]
- meta += 1
- end
-
- # 0x7a
- meta += 1
- cursor_size = meta[0]
- meta += 1
- cursor = meta[0, cursor_size]
-
- cursor = String.new(cursor)
- cursor = URI.unescape(cursor)
- cursor = Base64.decode_string(cursor)
-
- return cursor
-end
-
def fetch_playlist(plid, locale)
client = make_client(YT_URL)