Diffstat (limited to 'src/invidious.cr')
| -rw-r--r-- | src/invidious.cr | 713 |
1 file changed, 2 insertions, 711 deletions
diff --git a/src/invidious.cr b/src/invidious.cr
index fe7ab769..6ac099f3 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -363,6 +363,8 @@ Invidious::Routing.get "/preferences", Invidious::Routes::PreferencesRoute, :sho
 Invidious::Routing.post "/preferences", Invidious::Routes::PreferencesRoute, :update
 Invidious::Routing.get "/toggle_theme", Invidious::Routes::PreferencesRoute, :toggle_theme
 
+define_v1_api_routes()
+
 # Users
 
 post "/watch_ajax" do |env|
@@ -1637,365 +1639,6 @@ end
 
 # API Endpoints
 
-get "/api/v1/stats" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-  env.response.content_type = "application/json"
-
-  if !CONFIG.statistics_enabled
-    next error_json(400, "Statistics are not enabled.")
-  end
-
-  Invidious::Jobs::StatisticsRefreshJob::STATISTICS.to_json
-end
-
-# YouTube provides "storyboards", which are sprites containing x * y
-# preview thumbnails for individual scenes in a video.
-# See https://support.jwplayer.com/articles/how-to-add-preview-thumbnails
-get "/api/v1/storyboards/:id" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "application/json"
-
-  id = env.params.url["id"]
-  region = env.params.query["region"]?
-
-  begin
-    video = get_video(id, PG_DB, region: region)
-  rescue ex : VideoRedirect
-    env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
-    next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
-  rescue ex
-    env.response.status_code = 500
-    next
-  end
-
-  storyboards = video.storyboards
-  width = env.params.query["width"]?
-  height = env.params.query["height"]?
-
-  if !width && !height
-    response = JSON.build do |json|
-      json.object do
-        json.field "storyboards" do
-          generate_storyboards(json, id, storyboards)
-        end
-      end
-    end
-
-    next response
-  end
-
-  env.response.content_type = "text/vtt"
-
-  storyboard = storyboards.select { |storyboard| width == "#{storyboard[:width]}" || height == "#{storyboard[:height]}" }
-
-  if storyboard.empty?
-    env.response.status_code = 404
-    next
-  else
-    storyboard = storyboard[0]
-  end
-
-  String.build do |str|
-    str << <<-END_VTT
-    WEBVTT
-
-
-    END_VTT
-
-    start_time = 0.milliseconds
-    end_time = storyboard[:interval].milliseconds
-
-    storyboard[:storyboard_count].times do |i|
-      url = storyboard[:url]
-      authority = /(i\d?).ytimg.com/.match(url).not_nil![1]?
-      url = url.gsub("$M", i).gsub(%r(https://i\d?.ytimg.com/sb/), "")
-      url = "#{HOST_URL}/sb/#{authority}/#{url}"
-
-      storyboard[:storyboard_height].times do |j|
-        storyboard[:storyboard_width].times do |k|
-          str << <<-END_CUE
-          #{start_time}.000 --> #{end_time}.000
-          #{url}#xywh=#{storyboard[:width] * k},#{storyboard[:height] * j},#{storyboard[:width] - 2},#{storyboard[:height]}
-
-
-          END_CUE
-
-          start_time += storyboard[:interval].milliseconds
-          end_time += storyboard[:interval].milliseconds
-        end
-      end
-    end
-  end
-end
-
-get "/api/v1/captions/:id" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "application/json"
-
-  id = env.params.url["id"]
-  region = env.params.query["region"]?
-
-  # See https://github.com/ytdl-org/youtube-dl/blob/6ab30ff50bf6bd0585927cb73c7421bef184f87a/youtube_dl/extractor/youtube.py#L1354
-  # It is possible to use `/api/timedtext?type=list&v=#{id}` and
-  # `/api/timedtext?type=track&v=#{id}&lang=#{lang_code}` directly,
-  # but this does not provide links for auto-generated captions.
-  #
-  # In future this should be investigated as an alternative, since it does not require
-  # getting video info.
-
-  begin
-    video = get_video(id, PG_DB, region: region)
-  rescue ex : VideoRedirect
-    env.response.headers["Location"] = env.request.resource.gsub(id, ex.video_id)
-    next error_json(302, "Video is unavailable", {"videoId" => ex.video_id})
-  rescue ex
-    env.response.status_code = 500
-    next
-  end
-
-  captions = video.captions
-
-  label = env.params.query["label"]?
-  lang = env.params.query["lang"]?
-  tlang = env.params.query["tlang"]?
-
-  if !label && !lang
-    response = JSON.build do |json|
-      json.object do
-        json.field "captions" do
-          json.array do
-            captions.each do |caption|
-              json.object do
-                json.field "label", caption.name
-                json.field "languageCode", caption.languageCode
-                json.field "url", "/api/v1/captions/#{id}?label=#{URI.encode_www_form(caption.name)}"
-              end
-            end
-          end
-        end
-      end
-    end
-
-    next response
-  end
-
-  env.response.content_type = "text/vtt; charset=UTF-8"
-
-  if lang
-    caption = captions.select { |caption| caption.languageCode == lang }
-  else
-    caption = captions.select { |caption| caption.name == label }
-  end
-
-  if caption.empty?
-    env.response.status_code = 404
-    next
-  else
-    caption = caption[0]
-  end
-
-  url = URI.parse("#{caption.baseUrl}&tlang=#{tlang}").request_target
-
-  # Auto-generated captions often have cues that aren't aligned properly with the video,
-  # as well as some other markup that makes it cumbersome, so we try to fix that here
-  if caption.name.includes? "auto-generated"
-    caption_xml = YT_POOL.client &.get(url).body
-    caption_xml = XML.parse(caption_xml)
-
-    webvtt = String.build do |str|
-      str << <<-END_VTT
-      WEBVTT
-      Kind: captions
-      Language: #{tlang || caption.languageCode}
-
-
-      END_VTT
-
-      caption_nodes = caption_xml.xpath_nodes("//transcript/text")
-      caption_nodes.each_with_index do |node, i|
-        start_time = node["start"].to_f.seconds
-        duration = node["dur"]?.try &.to_f.seconds
-        duration ||= start_time
-
-        if caption_nodes.size > i + 1
-          end_time = caption_nodes[i + 1]["start"].to_f.seconds
-        else
-          end_time = start_time + duration
-        end
-
-        start_time = "#{start_time.hours.to_s.rjust(2, '0')}:#{start_time.minutes.to_s.rjust(2, '0')}:#{start_time.seconds.to_s.rjust(2, '0')}.#{start_time.milliseconds.to_s.rjust(3, '0')}"
-        end_time = "#{end_time.hours.to_s.rjust(2, '0')}:#{end_time.minutes.to_s.rjust(2, '0')}:#{end_time.seconds.to_s.rjust(2, '0')}.#{end_time.milliseconds.to_s.rjust(3, '0')}"
-
-        text = HTML.unescape(node.content)
-        text = text.gsub(/<font color="#[a-fA-F0-9]{6}">/, "")
-        text = text.gsub(/<\/font>/, "")
-        if md = text.match(/(?<name>.*) : (?<text>.*)/)
-          text = "<v #{md["name"]}>#{md["text"]}</v>"
-        end
-
-        str << <<-END_CUE
-        #{start_time} --> #{end_time}
-        #{text}
-
-
-        END_CUE
-      end
-    end
-  else
-    webvtt = YT_POOL.client &.get("#{url}&format=vtt").body
-  end
-
-  if title = env.params.query["title"]?
-    # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/
-    env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.encode_www_form(title)}\"; filename*=UTF-8''#{URI.encode_www_form(title)}"
-  end
-
-  webvtt
-end
-
-get "/api/v1/comments/:id" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-  region = env.params.query["region"]?
-
-  env.response.content_type = "application/json"
-
-  id = env.params.url["id"]
-
-  source = env.params.query["source"]?
-  source ||= "youtube"
-
-  thin_mode = env.params.query["thin_mode"]?
-  thin_mode = thin_mode == "true"
-
-  format = env.params.query["format"]?
-  format ||= "json"
-
-  continuation = env.params.query["continuation"]?
-  sort_by = env.params.query["sort_by"]?.try &.downcase
-
-  if source == "youtube"
-    sort_by ||= "top"
-
-    begin
-      comments = fetch_youtube_comments(id, continuation, format, locale, thin_mode, region, sort_by: sort_by)
-    rescue ex
-      next error_json(500, ex)
-    end
-
-    next comments
-  elsif source == "reddit"
-    sort_by ||= "confidence"
-
-    begin
-      comments, reddit_thread = fetch_reddit_comments(id, sort_by: sort_by)
-      content_html = template_reddit_comments(comments, locale)
-
-      content_html = fill_links(content_html, "https", "www.reddit.com")
-      content_html = replace_links(content_html)
-    rescue ex
-      comments = nil
-      reddit_thread = nil
-      content_html = ""
-    end
-
-    if !reddit_thread || !comments
-      env.response.status_code = 404
-      next
-    end
-
-    if format == "json"
-      reddit_thread = JSON.parse(reddit_thread.to_json).as_h
-      reddit_thread["comments"] = JSON.parse(comments.to_json)
-
-      next reddit_thread.to_json
-    else
-      response = {
-        "title"       => reddit_thread.title,
-        "permalink"   => reddit_thread.permalink,
-        "contentHtml" => content_html,
-      }
-
-      next response.to_json
-    end
-  end
-end
-
-get "/api/v1/annotations/:id" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "text/xml"
-
-  id = env.params.url["id"]
-  source = env.params.query["source"]?
-  source ||= "archive"
-
-  if !id.match(/[a-zA-Z0-9_-]{11}/)
-    env.response.status_code = 400
-    next
-  end
-
-  annotations = ""
-
-  case source
-  when "archive"
-    if CONFIG.cache_annotations && (cached_annotation = PG_DB.query_one?("SELECT * FROM annotations WHERE id = $1", id, as: Annotation))
-      annotations = cached_annotation.annotations
-    else
-      index = CHARS_SAFE.index(id[0]).not_nil!.to_s.rjust(2, '0')
-
-      # IA doesn't handle leading hyphens,
-      # so we use https://archive.org/details/youtubeannotations_64
-      if index == "62"
-        index = "64"
-        id = id.sub(/^-/, 'A')
-      end
-
-      file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")
-
-      location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))
-
-      if !location.headers["Location"]?
-        env.response.status_code = location.status_code
-      end
-
-      response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))
-
-      if response.body.empty?
-        env.response.status_code = 404
-        next
-      end
-
-      if response.status_code != 200
-        env.response.status_code = response.status_code
-        next
-      end
-
-      annotations = response.body
-
-      cache_annotation(PG_DB, id, annotations)
-    end
-  else # "youtube"
-    response = YT_POOL.client &.get("/annotations_invideo?video_id=#{id}")
-
-    if response.status_code != 200
-      env.response.status_code = response.status_code
-      next
-    end
-
-    annotations = response.body
-  end
-
-  etag = sha256(annotations)[0, 16]
-  if env.request.headers["If-None-Match"]?.try &.== etag
-    env.response.status_code = 304
-  else
-    env.response.headers["ETag"] = etag
-    annotations
-  end
-end
-
 get "/api/v1/videos/:id" do |env|
   locale = LOCALES[env.get("preferences").as(Preferences).locale]?
 
@@ -2016,324 +1659,6 @@ get "/api/v1/videos/:id" do |env|
   video.to_json(locale)
 end
 
-get "/api/v1/trending" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "application/json"
-
-  region = env.params.query["region"]?
-  trending_type = env.params.query["type"]?
-
-  begin
-    trending, plid = fetch_trending(trending_type, region, locale)
-  rescue ex
-    next error_json(500, ex)
-  end
-
-  videos = JSON.build do |json|
-    json.array do
-      trending.each do |video|
-        video.to_json(locale, json)
-      end
-    end
-  end
-
-  videos
-end
-
-get "/api/v1/popular" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "application/json"
-
-  if !CONFIG.popular_enabled
-    error_message = {"error" => "Administrator has disabled this endpoint."}.to_json
-    env.response.status_code = 400
-    next error_message
-  end
-
-  JSON.build do |json|
-    json.array do
-      popular_videos.each do |video|
-        video.to_json(locale, json)
-      end
-    end
-  end
-end
-
-get "/api/v1/channels/:ucid" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "application/json"
-
-  ucid = env.params.url["ucid"]
-  sort_by = env.params.query["sort_by"]?.try &.downcase
-  sort_by ||= "newest"
-
-  begin
-    channel = get_about_info(ucid, locale)
-  rescue ex : ChannelRedirect
-    env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
-    next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
-  rescue ex
-    next error_json(500, ex)
-  end
-
-  page = 1
-  if channel.auto_generated
-    videos = [] of SearchVideo
-    count = 0
-  else
-    begin
-      count, videos = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
-    rescue ex
-      next error_json(500, ex)
-    end
-  end
-
-  JSON.build do |json|
-    # TODO: Refactor into `to_json` for InvidiousChannel
-    json.object do
-      json.field "author", channel.author
-      json.field "authorId", channel.ucid
-      json.field "authorUrl", channel.author_url
-
-      json.field "authorBanners" do
-        json.array do
-          if channel.banner
-            qualities = {
-              {width: 2560, height: 424},
-              {width: 2120, height: 351},
-              {width: 1060, height: 175},
-            }
-            qualities.each do |quality|
-              json.object do
-                json.field "url", channel.banner.not_nil!.gsub("=w1060-", "=w#{quality[:width]}-")
-                json.field "width", quality[:width]
-                json.field "height", quality[:height]
-              end
-            end
-
-            json.object do
-              json.field "url", channel.banner.not_nil!.split("=w1060-")[0]
-              json.field "width", 512
-              json.field "height", 288
-            end
-          end
-        end
-      end
-
-      json.field "authorThumbnails" do
-        json.array do
-          qualities = {32, 48, 76, 100, 176, 512}
-
-          qualities.each do |quality|
-            json.object do
-              json.field "url", channel.author_thumbnail.gsub(/=s\d+/, "=s#{quality}")
-              json.field "width", quality
-              json.field "height", quality
-            end
-          end
-        end
-      end
-
-      json.field "subCount", channel.sub_count
-      json.field "totalViews", channel.total_views
-      json.field "joined", channel.joined.to_unix
-
-      json.field "autoGenerated", channel.auto_generated
-      json.field "isFamilyFriendly", channel.is_family_friendly
-      json.field "description", html_to_content(channel.description_html)
-      json.field "descriptionHtml", channel.description_html
-
-      json.field "allowedRegions", channel.allowed_regions
-
-      json.field "latestVideos" do
-        json.array do
-          videos.each do |video|
-            video.to_json(locale, json)
-          end
-        end
-      end
-
-      json.field "relatedChannels" do
-        json.array do
-          channel.related_channels.each do |related_channel|
-            json.object do
-              json.field "author", related_channel.author
-              json.field "authorId", related_channel.ucid
-              json.field "authorUrl", related_channel.author_url
-
-              json.field "authorThumbnails" do
-                json.array do
-                  qualities = {32, 48, 76, 100, 176, 512}
-
-                  qualities.each do |quality|
-                    json.object do
-                      json.field "url", related_channel.author_thumbnail.gsub(/=\d+/, "=s#{quality}")
-                      json.field "width", quality
-                      json.field "height", quality
-                    end
-                  end
-                end
-              end
-            end
-          end
-        end
-      end
-    end
-  end
-end
-
-{"/api/v1/channels/:ucid/videos", "/api/v1/channels/videos/:ucid"}.each do |route|
-  get route do |env|
-    locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-    env.response.content_type = "application/json"
-
-    ucid = env.params.url["ucid"]
-    page = env.params.query["page"]?.try &.to_i?
-    page ||= 1
-    sort_by = env.params.query["sort"]?.try &.downcase
-    sort_by ||= env.params.query["sort_by"]?.try &.downcase
-    sort_by ||= "newest"
-
-    begin
-      channel = get_about_info(ucid, locale)
-    rescue ex : ChannelRedirect
-      env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
-      next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
-    rescue ex
-      next error_json(500, ex)
-    end
-
-    begin
-      count, videos = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
-    rescue ex
-      next error_json(500, ex)
-    end
-
-    JSON.build do |json|
-      json.array do
-        videos.each do |video|
-          video.to_json(locale, json)
-        end
-      end
-    end
-  end
-end
-
-{"/api/v1/channels/:ucid/latest", "/api/v1/channels/latest/:ucid"}.each do |route|
-  get route do |env|
-    locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-    env.response.content_type = "application/json"
-
-    ucid = env.params.url["ucid"]
-
-    begin
-      videos = get_latest_videos(ucid)
-    rescue ex
-      next error_json(500, ex)
-    end
-
-    JSON.build do |json|
-      json.array do
-        videos.each do |video|
-          video.to_json(locale, json)
-        end
-      end
-    end
-  end
-end
-
-{"/api/v1/channels/:ucid/playlists", "/api/v1/channels/playlists/:ucid"}.each do |route|
-  get route do |env|
-    locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-    env.response.content_type = "application/json"
-
-    ucid = env.params.url["ucid"]
-    continuation = env.params.query["continuation"]?
-    sort_by = env.params.query["sort"]?.try &.downcase ||
-              env.params.query["sort_by"]?.try &.downcase ||
-              "last"
-
-    begin
-      channel = get_about_info(ucid, locale)
-    rescue ex : ChannelRedirect
-      env.response.headers["Location"] = env.request.resource.gsub(ucid, ex.channel_id)
-      next error_json(302, "Channel is unavailable", {"authorId" => ex.channel_id})
-    rescue ex
-      next error_json(500, ex)
-    end
-
-    items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
-
-    JSON.build do |json|
-      json.object do
-        json.field "playlists" do
-          json.array do
-            items.each do |item|
-              item.to_json(locale, json) if item.is_a?(SearchPlaylist)
-            end
-          end
-        end
-
-        json.field "continuation", continuation
-      end
-    end
-  end
-end
-
-{"/api/v1/channels/:ucid/comments", "/api/v1/channels/comments/:ucid"}.each do |route|
-  get route do |env|
-    locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-    env.response.content_type = "application/json"
-
-    ucid = env.params.url["ucid"]
-
-    thin_mode = env.params.query["thin_mode"]?
-    thin_mode = thin_mode == "true"
-
-    format = env.params.query["format"]?
-    format ||= "json"
-
-    continuation = env.params.query["continuation"]?
-    # sort_by = env.params.query["sort_by"]?.try &.downcase
-
-    begin
-      fetch_channel_community(ucid, continuation, locale, format, thin_mode)
-    rescue ex
-      next error_json(500, ex)
-    end
-  end
-end
-
-get "/api/v1/channels/search/:ucid" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-
-  env.response.content_type = "application/json"
-
-  ucid = env.params.url["ucid"]
-
-  query = env.params.query["q"]?
-  query ||= ""
-
-  page = env.params.query["page"]?.try &.to_i?
-  page ||= 1
-
-  count, search_results = channel_search(query, page, ucid)
-  JSON.build do |json|
-    json.array do
-      search_results.each do |item|
-        item.to_json(locale, json)
-      end
-    end
-  end
-end
-
 get "/api/v1/search" do |env|
   locale = LOCALES[env.get("preferences").as(Preferences).locale]?
   region = env.params.query["region"]?
@@ -2377,40 +1702,6 @@ get "/api/v1/search" do |env|
   end
 end
 
-get "/api/v1/search/suggestions" do |env|
-  locale = LOCALES[env.get("preferences").as(Preferences).locale]?
-  region = env.params.query["region"]?
-
-  env.response.content_type = "application/json"
-
-  query = env.params.query["q"]?
-  query ||= ""
-
-  begin
-    headers = HTTP::Headers{":authority" => "suggestqueries.google.com"}
-    response = YT_POOL.client &.get("/complete/search?hl=en&gl=#{region}&client=youtube&ds=yt&q=#{URI.encode_www_form(query)}&callback=suggestCallback", headers).body
-
-    body = response[35..-2]
-    body = JSON.parse(body).as_a
-    suggestions = body[1].as_a[0..-2]
-
-    JSON.build do |json|
-      json.object do
-        json.field "query", body[0].as_s
-        json.field "suggestions" do
-          json.array do
-            suggestions.each do |suggestion|
-              json.string suggestion[0].as_s
-            end
-          end
-        end
-      end
-    end
-  rescue ex
-    next error_json(500, ex)
-  end
-end
-
 {"/api/v1/playlists/:plid", "/api/v1/auth/playlists/:plid"}.each do |route|
   get route do |env|
     locale = LOCALES[env.get("preferences").as(Preferences).locale]?
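
The two inserted lines above are the call to define_v1_api_routes(); the 711 deleted lines are the inline /api/v1 handlers that now live outside src/invidious.cr. The macro itself is not part of this diff. As a rough, self-contained sketch (an assumption, not the code shipped by Invidious), a zero-argument Crystal macro of that name could expand into one route registration per endpoint; the Routing module and the handler strings below are illustrative stand-ins so the sketch compiles on its own:

# Illustrative sketch only (assumption): not the macro added by this commit.
# It expands into one route registration per /api/v1 path, which is the
# general shape a call like define_v1_api_routes() suggests.
module Routing
  # Stand-in for Invidious::Routing.get; the real project wires routes into Kemal.
  def self.get(path : String, handler : String)
    puts "GET #{path} -> #{handler}"
  end
end

macro define_v1_api_routes
  {% for route in [
       {"/api/v1/stats", "Misc#stats"},
       {"/api/v1/storyboards/:id", "Videos#storyboards"},
       {"/api/v1/captions/:id", "Videos#captions"},
       {"/api/v1/comments/:id", "Videos#comments"},
       {"/api/v1/trending", "Feeds#trending"},
       {"/api/v1/popular", "Feeds#popular"},
       {"/api/v1/channels/:ucid", "Channels#home"},
     ] %}
    Routing.get({{route[0]}}, {{route[1]}})
  {% end %}
end

# Registers every route listed above at compile time.
define_v1_api_routes()

Collecting the route table in one macro keeps invidious.cr down to a single call while the handler bodies move into dedicated route modules.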
