diff options
| author | Omar Roth <omarroth@protonmail.com> | 2019-07-05 11:34:22 -0500 |
|---|---|---|
| committer | Omar Roth <omarroth@protonmail.com> | 2019-07-05 11:35:04 -0500 |
| commit | 1277c3d156ebe1152304ea77c10fbe483376803b (patch) | |
| tree | 2581c82f4c0b4de190512af8567727cc46f893c0 | |
| parent | 8033d1ca6dbe95896b5ca2e49d2755f6f70fcd0a (diff) | |
| download | invidious-1277c3d156ebe1152304ea77c10fbe483376803b.tar.gz invidious-1277c3d156ebe1152304ea77c10fbe483376803b.tar.bz2 invidious-1277c3d156ebe1152304ea77c10fbe483376803b.zip | |
Fix chunk size for livestreams
| -rw-r--r-- | src/invidious.cr | 158 |
1 file changed, 92 insertions, 66 deletions
diff --git a/src/invidious.cr b/src/invidious.cr index 3ee87a24..b86adf48 100644 --- a/src/invidious.cr +++ b/src/invidious.cr @@ -4429,6 +4429,7 @@ get "/api/manifest/hls_variant/*" do |env| manifest end +# TODO: Fix redirect for local streams get "/api/manifest/hls_playlist/*" do |env| client = make_client(YT_URL) manifest = client.get(env.request.path) @@ -4641,93 +4642,118 @@ get "/videoplayback" do |env| next end - content_length = nil - first_chunk = true - range_start, range_end = parse_range(env.request.headers["Range"]?) - chunk_start = range_start - chunk_end = range_end + if url.includes? "&file=seg.ts" + begin + client = make_client(URI.parse(host), region) + client.get(url, headers) do |response| + response.headers.each do |key, value| + if !RESPONSE_HEADERS_BLACKLIST.includes?(key) + env.response.headers[key] = value + end + end - if !chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE - chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1 - end + env.response.headers["Access-Control-Allow-Origin"] = "*" - # TODO: Record bytes written so we can restart after a chunk fails - while true - if !range_end && content_length - range_end = content_length - end + if location = response.headers["Location"]? + location = URI.parse(location) + location = "#{location.full_path}&host=#{location.host}" - if range_end && chunk_start > range_end - break + if region + location += "&region=#{region}" + end + + next env.redirect location + end + + IO.copy(response.body_io, env.response) + end + rescue ex end + else + content_length = nil + first_chunk = true + range_start, range_end = parse_range(env.request.headers["Range"]?) 
+ chunk_start = range_start + chunk_end = range_end - if range_end && chunk_end > range_end - chunk_end = range_end + if !chunk_end || chunk_end - chunk_start > HTTP_CHUNK_SIZE + chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1 end - headers["Range"] = "bytes=#{chunk_start}-#{chunk_end}" - client = make_client(URI.parse(host), region) - begin - client.get(url, headers) do |response| - if first_chunk - if !env.request.headers["Range"]? && response.status_code == 206 - env.response.status_code = 200 - else - env.response.status_code = response.status_code - end + # TODO: Record bytes written so we can restart after a chunk fails + while true + if !range_end && content_length + range_end = content_length + end - response.headers.each do |key, value| - if !RESPONSE_HEADERS_BLACKLIST.includes?(key) && key != "Content-Range" - env.response.headers[key] = value - end - end + if range_end && chunk_start > range_end + break + end - env.response.headers["Access-Control-Allow-Origin"] = "*" + if range_end && chunk_end > range_end + chunk_end = range_end + end - if location = response.headers["Location"]? - location = URI.parse(location) - location = "#{location.full_path}&host=#{location.host}" + headers["Range"] = "bytes=#{chunk_start}-#{chunk_end}" - if region - location += "&region=#{region}" + begin + client = make_client(URI.parse(host), region) + client.get(url, headers) do |response| + if first_chunk + if !env.request.headers["Range"]? && response.status_code == 206 + env.response.status_code = 200 + else + env.response.status_code = response.status_code end - env.redirect location - break - end + response.headers.each do |key, value| + if !RESPONSE_HEADERS_BLACKLIST.includes?(key) && key != "Content-Range" + env.response.headers[key] = value + end + end - if title = query_params["title"]? 
- # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/ - env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}" - end + env.response.headers["Access-Control-Allow-Origin"] = "*" - if !response.headers.includes_word?("Transfer-Encoding", "chunked") - content_length = response.headers["Content-Range"].split("/")[-1].to_i64 - if env.request.headers["Range"]? - env.response.headers["Content-Range"] = "bytes #{range_start}-#{range_end || (content_length - 1)}/#{content_length}" - env.response.content_length = ((range_end.try &.+ 1) || content_length) - range_start - else - env.response.content_length = content_length + if location = response.headers["Location"]? + location = URI.parse(location) + location = "#{location.full_path}&host=#{location.host}" + + if region + location += "&region=#{region}" + end + + env.redirect location + break + end + + if title = query_params["title"]? + # https://blog.fastmail.com/2011/06/24/download-non-english-filenames/ + env.response.headers["Content-Disposition"] = "attachment; filename=\"#{URI.escape(title)}\"; filename*=UTF-8''#{URI.escape(title)}" + end + + if !response.headers.includes_word?("Transfer-Encoding", "chunked") + content_length = response.headers["Content-Range"].split("/")[-1].to_i64 + if env.request.headers["Range"]? + env.response.headers["Content-Range"] = "bytes #{range_start}-#{range_end || (content_length - 1)}/#{content_length}" + env.response.content_length = ((range_end.try &.+ 1) || content_length) - range_start + else + env.response.content_length = content_length + end end end - end - proxy_file(response, env) + proxy_file(response, env) + end + rescue ex + if ex.message != "Error reading socket: Connection reset by peer" + break + end end - # For livestream segments, break after first chunk - if url.includes? 
"&file=seg.ts" - break - end - rescue ex - if ex.message != "Error reading socket: Connection reset by peer" - break - end + chunk_start = chunk_end + 1 + chunk_end += HTTP_CHUNK_SIZE + first_chunk = false end - - chunk_start = chunk_end + 1 - chunk_end += HTTP_CHUNK_SIZE - first_chunk = false end end |
