forked from midou/invidious

Add support for polymer redesign

parent c1cbdae5ee
commit 1eca969cf6
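For context, this change moves Invidious away from scraping YouTube's old `disable_polymer=1` HTML pages and toward reading the JSON (`ytInitialData` / `player_response`) that the polymer redesign embeds in each page. A minimal sketch of that pattern, assuming a page body fetched the same way the routes below do (the regex mirrors the reworked `extract_initial_data` further down; the method and variable names here are illustrative only):

require "json"

# body is the HTML of a polymer-era YouTube page (watch, search, trending, ...)
def initial_data_sketch(body : String) : Hash(String, JSON::Any)
  json = body.match(/window\["ytInitialData"\]\s*=\s*(?<info>.*?);+\n/).try &.["info"] || "{}"
  JSON.parse(json).as_h
end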
@@ -7,23 +7,6 @@ CREATE TABLE public.videos
id text NOT NULL,
info text,
updated timestamp with time zone,
title text,
views bigint,
likes integer,
dislikes integer,
wilson_score double precision,
published timestamp with time zone,
description text,
language text,
author text,
ucid text,
allowed_regions text[],
is_family_friendly boolean,
genre text,
genre_url text,
license text,
sub_count_text text,
author_thumbnail text,
CONSTRAINT videos_pkey PRIMARY KEY (id)
);

@@ -27,9 +27,9 @@ describe "Helper" do
describe "#produce_channel_search_url" do
it "correctly produces token for searching a specific channel" do
produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "", 100).should eq("/browse_ajax?continuation=4qmFsgI-EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaIEVnWnpaV0Z5WTJnd0FqZ0JZQUZxQUxnQkFIb0RNVEF3WgA%3D&gl=US&hl=en")
produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "", 100).should eq("/browse_ajax?continuation=4qmFsgI2EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaGEVnWnpaV0Z5WTJnNEFYb0RNVEF3dUFFQVoA&gl=US&hl=en")
produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "По ожиशुपतिरपि子而時ஸ்றீனி", 0).should eq("/browse_ajax?continuation=4qmFsgJ8EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaIEVnWnpaV0Z5WTJnd0FqZ0JZQUZxQUxnQkFIb0JNQT09Wj7Qn9C-INC-0LbQuOCktuClgeCkquCkpOCkv-CksOCkquCkv-WtkOiAjOaZguCuuOCvjeCuseCvgOCuqeCuvw%3D%3D&gl=US&hl=en")
produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "По ожиशुपतिरपि子而時ஸ்றீனி", 0).should eq("/browse_ajax?continuation=4qmFsgJ0EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaGEVnWnpaV0Z5WTJnNEFYb0JNTGdCQUE9PVo-0J_QviDQvtC20LjgpLbgpYHgpKrgpKTgpL_gpLDgpKrgpL_lrZDogIzmmYLgrrjgr43grrHgr4Dgrqngrr8%3D&gl=US&hl=en")
end
end
173 src/invidious.cr
@@ -510,16 +510,16 @@ get "/watch" do |env|
comment_html ||= ""
end
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
fmt_stream = video.fmt_stream
adaptive_fmts = video.adaptive_fmts
if params.local
fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
fmt_stream.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
adaptive_fmts.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
end
video_streams = video.video_streams(adaptive_fmts)
audio_streams = video.audio_streams(adaptive_fmts)
video_streams = video.video_streams
audio_streams = video.audio_streams
# Older videos may not have audio sources available.
# We redirect here so they're not unplayable
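With this change fmt_stream and adaptive_fmts hold JSON::Any objects parsed straight out of player_response, rather than HTTP::Params string hashes, so every field needs an explicit cast before use. An illustrative snippet (the sample values are invented; the field names itag, mimeType, and url are the ones the new code reads):

require "json"

# Invented sample shaped like one entry of player_response["streamingData"]["formats"].
fmt = JSON.parse(%({"itag": 22, "mimeType": "video/mp4", "bitrate": 1000000, "url": "https://example.invalid/videoplayback"}))

fmt["itag"].as_i     # => 22
fmt["mimeType"].as_s # => "video/mp4"
fmt["url"].as_s      # must be cast to String before URI.parse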
@@ -549,33 +549,23 @@ get "/watch" do |env|
aspect_ratio = "16:9"
video.description_html = fill_links(video.description_html, "https", "www.youtube.com")
video.description_html = replace_links(video.description_html)
host_url = make_host_url(config, Kemal.config)
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
end
thumbnail = "/vi/#{video.id}/maxres.jpg"
if params.raw
if params.listen
url = audio_streams[0]["url"]
url = audio_streams[0]["url"].as_s
audio_streams.each do |fmt|
if fmt["bitrate"] == params.quality.rchop("k")
url = fmt["url"]
if fmt["bitrate"].as_i == params.quality.rchop("k").to_i
url = fmt["url"].as_s
end
end
else
url = fmt_stream[0]["url"]
url = fmt_stream[0]["url"].as_s
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == params.quality
url = fmt["url"]
if fmt["quality"].as_s == params.quality
url = fmt["url"].as_s
end
end
end

@@ -583,24 +573,6 @@ get "/watch" do |env|
next env.redirect url
end
rvs = [] of Hash(String, String)
video.info["rvs"]?.try &.split(",").each do |rv|
rvs << HTTP::Params.parse(rv).to_h
end
rating = video.info["avg_rating"].to_f64
if video.views > 0
engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)
else
engagement = 0
end
playability_status = video.player_response["playabilityStatus"]?
if playability_status && playability_status["status"] == "LIVE_STREAM_OFFLINE" && !video.premiere_timestamp
reason = playability_status["reason"]?.try &.as_s
end
reason ||= ""
templated "watch"
end

@@ -752,16 +724,16 @@ get "/embed/:id" do |env|
notifications.delete(id)
end
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
fmt_stream = video.fmt_stream
adaptive_fmts = video.adaptive_fmts
if params.local
fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
fmt_stream.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
adaptive_fmts.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
end
video_streams = video.video_streams(adaptive_fmts)
audio_streams = video.audio_streams(adaptive_fmts)
video_streams = video.video_streams
audio_streams = video.audio_streams
if audio_streams.empty? && !video.live_now
if params.quality == "dash"

@@ -788,25 +760,13 @@ get "/embed/:id" do |env|
aspect_ratio = nil
video.description_html = fill_links(video.description_html, "https", "www.youtube.com")
video.description_html = replace_links(video.description_html)
host_url = make_host_url(config, Kemal.config)
if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
end
thumbnail = "/vi/#{video.id}/maxres.jpg"
if params.raw
url = fmt_stream[0]["url"]
url = fmt_stream[0]["url"].as_s
fmt_stream.each do |fmt|
if fmt["label"].split(" - ")[0] == params.quality
url = fmt["url"]
end
url = fmt["url"].as_s if fmt["quality"].as_s == params.quality
end
next env.redirect url
@@ -1469,7 +1429,6 @@ post "/login" do |env|
traceback = IO::Memory.new
# See https://github.com/ytdl-org/youtube-dl/blob/2019.04.07/youtube_dl/extractor/youtube.py#L82
# TODO: Convert to QUIC
begin
client = QUIC::Client.new(LOGIN_URL)
headers = HTTP::Headers.new

@@ -2329,8 +2288,7 @@ get "/modify_notifications" do |env|
end
headers = cookies.add_request_headers(headers)
match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
if match
if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
session_token = match["session_token"]
else
next env.redirect referer

@@ -3575,14 +3533,14 @@ get "/channel/:ucid" do |env|
item.author
end
end
items = items.select { |item| item.is_a?(SearchPlaylist) }.map { |item| item.as(SearchPlaylist) }
items = items.select(&.is_a?(SearchPlaylist)).map(&.as(SearchPlaylist))
items.each { |item| item.author = "" }
else
sort_options = {"newest", "oldest", "popular"}
sort_by ||= "newest"
items, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
items.select! { |item| !item.paid }
count, items = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
items.reject! &.paid
env.set "search", "channel:#{channel.ucid} "
end

@@ -5125,7 +5083,7 @@ get "/api/manifest/dash/id/:id" do |env|
next
end
if dashmpd = video.player_response["streamingData"]?.try &.["dashManifestUrl"]?.try &.as_s
if dashmpd = video.dash_manifest_url
manifest = YT_POOL.client &.get(URI.parse(dashmpd).full_path).body
manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|

@@ -5142,16 +5100,16 @@ get "/api/manifest/dash/id/:id" do |env|
next manifest
end
adaptive_fmts = video.adaptive_fmts(decrypt_function)
adaptive_fmts = video.adaptive_fmts
if local
adaptive_fmts.each do |fmt|
fmt["url"] = URI.parse(fmt["url"]).full_path
fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path)
end
end
audio_streams = video.audio_streams(adaptive_fmts)
video_streams = video.video_streams(adaptive_fmts).sort_by { |stream| {stream["size"].split("x")[0].to_i, stream["fps"].to_i} }.reverse
audio_streams = video.audio_streams
video_streams = video.video_streams.sort_by { |stream| {stream["width"].as_i, stream["fps"].as_i} }.reverse
XML.build(indent: "  ", encoding: "UTF-8") do |xml|
xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",

@@ -5161,24 +5119,22 @@ get "/api/manifest/dash/id/:id" do |env|
i = 0
{"audio/mp4", "audio/webm"}.each do |mime_type|
mime_streams = audio_streams.select { |stream| stream["type"].starts_with? mime_type }
if mime_streams.empty?
next
end
mime_streams = audio_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
next if mime_streams.empty?
xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true) do
mime_streams.each do |fmt|
codecs = fmt["type"].split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"].to_i * 1000
itag = fmt["itag"]
url = fmt["url"]
codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"].as_i
itag = fmt["itag"].as_i
url = fmt["url"].as_s
xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011",
value: "2")
xml.element("BaseURL") { xml.text url }
xml.element("SegmentBase", indexRange: fmt["index"]) do
xml.element("Initialization", range: fmt["init"])
xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
end
end
end

@@ -5187,21 +5143,24 @@ get "/api/manifest/dash/id/:id" do |env|
i += 1
end
potential_heights = {4320, 2160, 1440, 1080, 720, 480, 360, 240, 144}
{"video/mp4", "video/webm"}.each do |mime_type|
mime_streams = video_streams.select { |stream| stream["type"].starts_with? mime_type }
mime_streams = video_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
next if mime_streams.empty?
heights = [] of Int32
xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, scanType: "progressive") do
mime_streams.each do |fmt|
codecs = fmt["type"].split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"]
itag = fmt["itag"]
url = fmt["url"]
width, height = fmt["size"].split("x").map { |i| i.to_i }
codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
bandwidth = fmt["bitrate"].as_i
itag = fmt["itag"].as_i
url = fmt["url"].as_s
width = fmt["width"].as_i
height = fmt["height"].as_i
# Resolutions reported by YouTube player (may not accurately reflect source)
height = [4320, 2160, 1440, 1080, 720, 480, 360, 240, 144].sort_by { |i| (height - i).abs }[0]
height = potential_heights.min_by { |i| (height - i).abs }
next if unique_res && heights.includes? height
heights << height

@@ -5209,8 +5168,8 @@ get "/api/manifest/dash/id/:id" do |env|
startWithSAP: "1", maxPlayoutRate: "1",
bandwidth: bandwidth, frameRate: fmt["fps"]) do
xml.element("BaseURL") { xml.text url }
xml.element("SegmentBase", indexRange: fmt["index"]) do
xml.element("Initialization", range: fmt["init"])
xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
end
end
end

@@ -5224,10 +5183,10 @@ get "/api/manifest/dash/id/:id" do |env|
end
get "/api/manifest/hls_variant/*" do |env|
manifest = YT_POOL.client &.get(env.request.path)
response = YT_POOL.client &.get(env.request.path)
if manifest.status_code != 200
env.response.status_code = manifest.status_code
if response.status_code != 200
env.response.status_code = response.status_code
next
end

@@ -5247,10 +5206,10 @@ get "/api/manifest/hls_variant/*" do |env|
end
get "/api/manifest/hls_playlist/*" do |env|
manifest = YT_POOL.client &.get(env.request.path)
response = YT_POOL.client &.get(env.request.path)
if manifest.status_code != 200
env.response.status_code = manifest.status_code
if response.status_code != 200
env.response.status_code = response.status_code
next
end

@@ -5320,7 +5279,7 @@ get "/latest_version" do |env|
end
id ||= env.params.query["id"]?
itag ||= env.params.query["itag"]?
itag ||= env.params.query["itag"]?.try &.to_i
region = env.params.query["region"]?
@@ -5335,26 +5294,16 @@ get "/latest_version" do |env|
video = get_video(id, PG_DB, region: region)
fmt_stream = video.fmt_stream(decrypt_function)
adaptive_fmts = video.adaptive_fmts(decrypt_function)
fmt = video.fmt_stream.find(nil) { |f| f["itag"].as_i == itag } || video.adaptive_fmts.find(nil) { |f| f["itag"].as_i == itag }
url = fmt.try &.["url"]?.try &.as_s
urls = (fmt_stream + adaptive_fmts).select { |fmt| fmt["itag"] == itag }
if urls.empty?
if !url
env.response.status_code = 404
next
elsif urls.size > 1
env.response.status_code = 409
next
end
url = urls[0]["url"]
if local
url = URI.parse(url).full_path.not_nil!
end
if title
url += "&title=#{title}"
end
url = URI.parse(url).full_path.not_nil! if local
url = "#{url}&title=#{title}" if title
env.redirect url
end

@@ -232,9 +232,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
if auto_generated
videos = extract_videos(nodeset)
videos = extract_videos_html(nodeset)
else
videos = extract_videos(nodeset, ucid, author)
videos = extract_videos_html(nodeset, ucid, author)
end
end

@@ -317,9 +317,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
nodeset = nodeset.not_nil!
if auto_generated
videos = extract_videos(nodeset)
videos = extract_videos_html(nodeset)
else
videos = extract_videos(nodeset, ucid, author)
videos = extract_videos_html(nodeset, ucid, author)
end
count = nodeset.size

@@ -429,7 +429,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
if auto_generated
items = extract_shelf_items(nodeset, ucid, author)
else
items = extract_items(nodeset, ucid, author)
items = extract_items_html(nodeset, ucid, author)
end
return items, continuation

@@ -584,16 +584,8 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
headers = HTTP::Headers.new
headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
headers["content-type"] = "application/x-www-form-urlencoded"
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = ""
headers["x-spf-referer"] = ""
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""
post_req = {
session_token: session_token,
}

@@ -633,13 +625,7 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
next if !post
if !post["contentText"]?
content_html = ""
else
content_html = post["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
post["contentText"]["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
end
content_html = post["contentText"]?.try { |t| parse_content(t) } || ""
author = post["authorText"]?.try &.["simpleText"]? || ""
json.object do

@@ -960,7 +946,7 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
2.times do |i|
url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
response = YT_POOL.client &.get(url, headers)
response = YT_POOL.client &.get(url)
initial_data = JSON.parse(response.body).as_a.find &.["response"]?
break if !initial_data
videos.concat extract_videos(initial_data.as_h)

@@ -980,7 +966,7 @@ def get_latest_videos(ucid)
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
videos = extract_videos(nodeset, ucid)
videos = extract_videos_html(nodeset, ucid)
end
return videos

@@ -59,7 +59,7 @@ end
def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, sort_by = "top")
video = get_video(id, db, region: region)
session_token = video.info["session_token"]?
session_token = video.session_token
case cursor
when nil, ""

@@ -85,17 +85,9 @@ def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, so
session_token: session_token,
}
headers = HTTP::Headers.new
headers["content-type"] = "application/x-www-form-urlencoded"
headers["cookie"] = video.info["cookie"]
headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999"
headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999"
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
headers = HTTP::Headers{
"cookie" => video.cookie,
}
response = YT_POOL.client(region, &.post("/comment_service_ajax?action_get_comments=1&hl=en&gl=US", headers, form: post_req))
response = JSON.parse(response.body)

@@ -150,8 +142,7 @@ def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, so
node_comment = node["commentRenderer"]
end
content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
node_comment["contentText"]["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
content_html = node_comment["contentText"]?.try { |t| parse_content(t) } || ""
author = node_comment["authorText"]?.try &.["simpleText"]? || ""
json.field "author", author

@@ -523,6 +514,11 @@ def fill_links(html, scheme, host)
return html.to_xml(options: XML::SaveOptions::NO_DECL)
end
def parse_content(content : JSON::Any) : String
content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
end
def content_to_comment_html(content)
comment_html = content.map do |run|
text = HTML.escape(run["text"].as_s)
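The parse_content helper introduced above centralizes the simpleText/runs handling that was previously duplicated across the comment and community code. A hedged usage sketch (the JSON sample is invented; simpleText and runs are the fields the helper actually reads):

# Illustrative only: a contentText object as it might appear in a commentRenderer.
content = JSON.parse(%({"runs": [{"text": "Hello "}, {"text": "world"}]}))
html = parse_content(content) # plain text runs come back HTML-escaped; "" when neither simpleText nor runs is present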
@@ -313,13 +313,149 @@ def html_to_content(description_html : String)
return description
end
def extract_videos(nodeset, ucid = nil, author_name = nil)
videos = extract_items(nodeset, ucid, author_name)
videos.select { |item| item.is_a?(SearchVideo) }.map { |video| video.as(SearchVideo) }
def extract_videos(initial_data : Hash(String, JSON::Any))
extract_items(initial_data).select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
end
def extract_items(nodeset, ucid = nil, author_name = nil)
# TODO: Make this a 'common', so it makes more sense to be used here
def extract_items(initial_data : Hash(String, JSON::Any))
items = [] of SearchItem
initial_data.try { |t|
t["contents"]? || t["response"]?
}.try { |t|
t["twoColumnBrowseResultsRenderer"]?.try &.["tabs"].as_a[0]?.try &.["tabRenderer"]["content"] ||
t["twoColumnSearchResultsRenderer"]?.try &.["primaryContents"] ||
t["continuationContents"]?
}.try { |t| t["sectionListRenderer"]? || t["sectionListContinuation"]? }
.try &.["contents"]
.as_a.each { |c|
c.try &.["itemSectionRenderer"]["contents"].as_a
.try { |t| t[0]?.try &.["shelfRenderer"]?.try &.["content"]["expandedShelfContentsRenderer"]?.try &.["items"].as_a || t }
.each { |item|
if i = item["videoRenderer"]?
video_id = i["videoId"].as_s
title = i["title"].try { |t| t["simpleText"]?.try &.as_s || t["runs"]?.try &.as_a.map(&.["text"].as_s).join("") } || ""
author_info = i["ownerText"]?.try &.["runs"].as_a[0]?
author = author_info.try &.["text"].as_s || ""
author_id = author_info.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]["browseId"].as_s || ""
published = i["publishedTimeText"]?.try &.["simpleText"]?.try { |t| decode_date(t.as_s) } || Time.local
view_count = i["viewCountText"]?.try &.["simpleText"]?.try &.as_s.gsub(/\D+/, "").to_i64? || 0_i64
description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
length_seconds = i["lengthText"]?.try &.["simpleText"]?.try &.as_s.try { |t| decode_length_seconds(t) } || 0
live_now = false
paid = false
premium = false
premiere_timestamp = i["upcomingEventData"]?.try &.["startTime"]?.try { |t| Time.unix(t.as_s.to_i64) }
i["badges"]?.try &.as_a.each do |badge|
b = badge["metadataBadgeRenderer"]
case b["label"].as_s
when "LIVE NOW"
live_now = true
when "New", "4K", "CC"
# TODO
when "Premium"
paid = true
# TODO: Potentially available as i["topStandaloneBadge"]["metadataBadgeRenderer"]
premium = true
else nil # Ignore
end
end
items << SearchVideo.new(
title: title,
id: video_id,
author: author,
ucid: author_id,
published: published,
views: view_count,
description_html: description_html,
length_seconds: length_seconds,
live_now: live_now,
paid: paid,
premium: premium,
premiere_timestamp: premiere_timestamp
)
elsif i = item["channelRenderer"]?
author = i["title"]["simpleText"]?.try &.as_s || ""
author_id = i["channelId"]?.try &.as_s || ""
author_thumbnail = i["thumbnail"]["thumbnails"]?.try &.as_a[0]?.try { |u| "https:#{u["url"]}" } || ""
subscriber_count = i["subscriberCountText"]?.try &.["simpleText"]?.try &.as_s.try { |s| short_text_to_number(s.split(" ")[0]) } || 0
auto_generated = false
auto_generated = true if !i["videoCountText"]?
video_count = i["videoCountText"]?.try &.["runs"].as_a[0]?.try &.["text"].as_s.gsub(/\D/, "").to_i || 0
description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
items << SearchChannel.new(
author: author,
ucid: author_id,
author_thumbnail: author_thumbnail,
subscriber_count: subscriber_count,
video_count: video_count,
description_html: description_html,
auto_generated: auto_generated,
)
elsif i = item["playlistRenderer"]?
title = i["title"]["simpleText"]?.try &.as_s || ""
plid = i["playlistId"]?.try &.as_s || ""
video_count = i["videoCount"]?.try &.as_s.to_i || 0
playlist_thumbnail = i["thumbnails"].as_a[0]?.try &.["thumbnails"]?.try &.as_a[0]?.try &.["url"].as_s || ""
author_info = i["shortBylineText"]["runs"].as_a[0]?
author = author_info.try &.["text"].as_s || ""
author_id = author_info.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]["browseId"].as_s || ""
videos = i["videos"]?.try &.as_a.map do |v|
v = v["childVideoRenderer"]
v_title = v["title"]["simpleText"]?.try &.as_s || ""
v_id = v["videoId"]?.try &.as_s || ""
v_length_seconds = v["lengthText"]?.try &.["simpleText"]?.try { |t| decode_length_seconds(t.as_s) } || 0
SearchPlaylistVideo.new(
title: v_title,
id: v_id,
length_seconds: v_length_seconds
)
end || [] of SearchPlaylistVideo
# TODO: i["publishedTimeText"]?
items << SearchPlaylist.new(
title: title,
id: plid,
author: author,
ucid: author_id,
video_count: video_count,
videos: videos,
thumbnail: playlist_thumbnail
)
elsif i = item["radioRenderer"]? # Mix
# TODO
elsif i = item["showRenderer"]? # Show
# TODO
elsif i = item["shelfRenderer"]?
elsif i = item["horizontalCardListRenderer"]?
elsif i = item["searchPyvRenderer"]? # Ad
end
}
}
items
end
def extract_videos_html(nodeset, ucid = nil, author_name = nil)
extract_items_html(nodeset, ucid, author_name).select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
end
def extract_items_html(nodeset, ucid = nil, author_name = nil)
# TODO: Make this a 'CommonItem', so it makes more sense to be used here
items = [] of SearchItem
nodeset.each do |node|
@@ -456,7 +592,7 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
paid = true
end
premiere_timestamp = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li/span[@class="localized-date"])).try &.["data-timestamp"]?.try &.to_i64
premiere_timestamp = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li/span[@class="localized-date"])).try &.["data-timestamp"]?.try &.to_i64?
if premiere_timestamp
premiere_timestamp = Time.unix(premiere_timestamp)
end

@@ -683,12 +819,12 @@ def check_table(db, logger, table_name, struct_type = nil)
return if column_array.size <= struct_array.size
# column_array.each do |column|
#   if !struct_array.includes? column
#     logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
#     db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
#   end
# end
column_array.each do |column|
if !struct_array.includes? column
logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
end
end
end
class PG::ResultSet

@@ -864,12 +1000,12 @@ def create_notification_stream(env, topics, connection_channel)
end
end
def extract_initial_data(body)
initial_data = body.match(/window\["ytInitialData"\] = (?<info>.*?);\n/).try &.["info"] || "{}"
def extract_initial_data(body) : Hash(String, JSON::Any)
initial_data = body.match(/window\["ytInitialData"\]\s*=\s*(?<info>.*?);+\n/).try &.["info"] || "{}"
if initial_data.starts_with?("JSON.parse(\"")
return JSON.parse(JSON.parse(%({"initial_data":"#{initial_data[12..-3]}"}))["initial_data"].as_s)
return JSON.parse(JSON.parse(%({"initial_data":"#{initial_data[12..-3]}"}))["initial_data"].as_s).as_h
else
return JSON.parse(initial_data)
return JSON.parse(initial_data).as_h
end
end
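extract_initial_data now returns a Hash(String, JSON::Any), which is what the reworked extract_items/extract_videos expect. The flow the new search and trending code follows can be sketched as below (the query is just an example; YT_POOL and the helpers are the ones defined in this commit):

body = YT_POOL.client &.get("/results?q=crystal+lang&hl=en").body
initial_data = extract_initial_data(body) # Hash(String, JSON::Any)
videos = extract_videos(initial_data)     # Array(SearchVideo)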
@@ -201,7 +201,7 @@ end
def bypass_captcha(captcha_key, logger)
loop do
begin
{"/watch?v=CvFH_6DNRCY&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999", produce_channel_videos_url(ucid: "UCXuqSBlHAE6Xw-yeJA0Tunw")}.each do |path|
{"/watch?v=CvFH_6DNRCY&gl=US&hl=en&has_verified=1&bpctr=9999999999", produce_channel_videos_url(ucid: "UCXuqSBlHAE6Xw-yeJA0Tunw")}.each do |path|
response = YT_POOL.client &.get(path)
if response.body.includes?("To continue with your YouTube experience, please fill out the form below.")
html = XML.parse_html(response.body)

@@ -1,8 +1,8 @@
alias SigProc = Proc(Array(String), Int32, Array(String))
def fetch_decrypt_function(id = "CvFH_6DNRCY")
document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1").body
url = document.match(/src="(?<url>.*player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
url = document.match(/src="(?<url>\/yts\/jsbin\/player_ias-[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
player = YT_POOL.client &.get(url).body
function_name = player.match(/^(?<name>[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]

@@ -8,7 +8,7 @@ def add_yt_headers(request)
request.headers["accept-language"] ||= "en-us,en;q=0.5"
return if request.resource.starts_with? "/sorry/index"
request.headers["x-youtube-client-name"] ||= "1"
request.headers["x-youtube-client-version"] ||= "1.20180719"
request.headers["x-youtube-client-version"] ||= "2.20200609"
if !CONFIG.cookies.empty?
request.headers["cookie"] = "#{(CONFIG.cookies.map { |c| "#{c.name}=#{c.value}" }).join("; ")}; #{request.headers["cookie"]?}"
end

@@ -20,7 +20,6 @@ end
def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"
if cookies
headers = cookies.add_request_headers(headers)

@@ -96,6 +96,10 @@ struct SearchVideo
end
end
def is_upcoming
premiere_timestamp ? true : false
end
db_mapping({
title: String,
id: String,

@@ -227,61 +231,35 @@ end
alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist
def channel_search(query, page, channel)
response = YT_POOL.client &.get("/channel/#{channel}?disable_polymer=1&hl=en&gl=US")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
response = YT_POOL.client &.get("/channel/#{channel}?hl=en&gl=US")
response = YT_POOL.client &.get("/user/#{channel}?hl=en&gl=US") if response.headers["location"]?
response = YT_POOL.client &.get("/c/#{channel}?hl=en&gl=US") if response.headers["location"]?
if !canonical
response = YT_POOL.client &.get("/c/#{channel}?disable_polymer=1&hl=en&gl=US")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
end
ucid = response.body.match(/\\"channelId\\":\\"(?<ucid>[^\\]+)\\"/).try &.["ucid"]?
if !canonical
response = YT_POOL.client &.get("/user/#{channel}?disable_polymer=1&hl=en&gl=US")
document = XML.parse_html(response.body)
canonical = document.xpath_node(%q(//link[@rel="canonical"]))
end
if !canonical
return 0, [] of SearchItem
end
ucid = canonical["href"].split("/")[-1]
return 0, [] of SearchItem if !ucid
url = produce_channel_search_url(ucid, query, page)
response = YT_POOL.client &.get(url)
json = JSON.parse(response.body)
initial_data = JSON.parse(response.body).as_a.find &.["response"]?
return 0, [] of SearchItem if !initial_data
items = extract_items(initial_data.as_h)
if json["content_html"]? && !json["content_html"].as_s.empty?
document = XML.parse_html(json["content_html"].as_s)
nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
count = nodeset.size
items = extract_items(nodeset)
else
count = 0
items = [] of SearchItem
end
return count, items
return items.size, items
end
def search(query, page = 1, search_params = produce_search_params(content_type: "all"), region = nil)
if query.empty?
return {0, [] of SearchItem}
end
return 0, [] of SearchItem if query.empty?
html = YT_POOL.client(region, &.get("/results?q=#{URI.encode_www_form(query)}&page=#{page}&sp=#{search_params}&hl=en&disable_polymer=1").body)
if html.empty?
return {0, [] of SearchItem}
end
body = YT_POOL.client(region, &.get("/results?q=#{URI.encode_www_form(query)}&page=#{page}&sp=#{search_params}&hl=en").body)
return 0, [] of SearchItem if body.empty?
html = XML.parse_html(html)
nodeset = html.xpath_nodes(%q(//ol[@class="item-section"]/li))
items = extract_items(nodeset)
initial_data = extract_initial_data(body)
items = extract_items(initial_data)
return {nodeset.size, items}
# initial_data["estimatedResults"]?.try &.as_s.to_i64
return items.size, items
end
def produce_search_params(sort : String = "relevance", date : String = "", content_type : String = "",

@@ -387,12 +365,9 @@ def produce_channel_search_url(ucid, query, page)
"2:string" => ucid,
"3:base64" => {
"2:string" => "search",
"6:varint" => 2_i64,
"7:varint" => 1_i64,
"12:varint" => 1_i64,
"13:string" => "",
"23:varint" => 0_i64,
"15:string" => "#{page}",
"23:varint" => 0_i64,
},
"11:string" => query,
},
@@ -1,7 +1,4 @@
def fetch_trending(trending_type, region, locale)
headers = HTTP::Headers.new
headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
region ||= "US"
region = region.upcase

@@ -11,7 +8,7 @@ def fetch_trending(trending_type, region, locale)
if trending_type && trending_type != "Default"
trending_type = trending_type.downcase.capitalize
response = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en", headers).body
response = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
initial_data = extract_initial_data(response)

@@ -21,31 +18,28 @@ def fetch_trending(trending_type, region, locale)
if url
url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
url = url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
url += "&disable_polymer=1&gl=#{region}&hl=en"
url = "#{url}&gl=#{region}&hl=en"
trending = YT_POOL.client &.get(url).body
plid = extract_plid(url)
else
trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
end
else
trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
end
trending = XML.parse_html(trending)
nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
trending = extract_videos(nodeset)
initial_data = extract_initial_data(trending)
trending = extract_videos(initial_data)
return {trending, plid}
end
def extract_plid(url)
plid = URI.parse(url)
.try { |i| HTTP::Params.parse(i.query.not_nil!)["bp"] }
return url.try { |i| URI.parse(i).query }
.try { |i| HTTP::Params.parse(i)["bp"] }
.try { |i| URI.decode_www_form(i) }
.try { |i| Base64.decode(i) }
.try { |i| IO::Memory.new(i) }
.try { |i| Protodec::Any.parse(i) }
.try { |i| i["44:0:embedded"]["2:1:string"].as_s }
return plid
.try &.["44:0:embedded"]?.try &.["2:1:string"]?.try &.as_s
end

@@ -267,7 +267,7 @@ def subscribe_ajax(channel_id, action, env_headers)
end
headers = cookies.add_request_headers(headers)
if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
session_token = match["session_token"]
headers["content-type"] = "application/x-www-form-urlencoded"

@@ -300,7 +300,7 @@ end
# end
# headers = cookies.add_request_headers(headers)
#
# if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
# if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
# session_token = match["session_token"]
#
# headers["content-type"] = "application/x-www-form-urlencoded"
File diff suppressed because it is too large
@@ -85,7 +85,7 @@
</p>
<h5 class="pure-g">
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
<div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
<% elsif Time.utc - item.published > 1.minute %>
<div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>

@@ -144,7 +144,7 @@
</p>
<h5 class="pure-g">
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
<% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
<div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
<% elsif Time.utc - item.published > 1.minute %>
<div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>

@@ -3,23 +3,23 @@
<% if params.autoplay %>autoplay<% end %>
<% if params.video_loop %>loop<% end %>
<% if params.controls %>controls<% end %>>
<% if hlsvp && !CONFIG.disabled?("livestreams") %>
<source src="<%= hlsvp %>?local=true" type="application/x-mpegURL" label="livestream">
<% if (hlsvp = video.hls_manifest_url) && !CONFIG.disabled?("livestreams") %>
<source src="<%= URI.parse(hlsvp).full_path %>?local=true" type="application/x-mpegURL" label="livestream">
<% else %>
<% if params.listen %>
<% audio_streams.each_with_index do |fmt, i| %>
<source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["type"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
<source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
<% end %>
<% else %>
<% if params.quality == "dash" %>
<source src="/api/manifest/dash/id/<%= video.id %>?local=true" type='application/dash+xml' label="dash">
<source src="/api/manifest/dash/id/<%= video.id %>?local=true&unique_res=1" type='application/dash+xml' label="dash">
<% end %>
<% fmt_stream.each_with_index do |fmt, i| %>
<% if params.quality %>
<source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= params.quality == fmt["label"].split(" - ")[0] %>">
<source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["quality"] %>" selected="<%= params.quality == fmt["quality"] %>">
<% else %>
<source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= i == 0 ? true : false %>">
<source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["quality"] %>" selected="<%= i == 0 ? true : false %>">
<% end %>
<% end %>
<% end %>

@@ -33,8 +33,8 @@
"index" => continuation,
"plid" => plid,
"length_seconds" => video.length_seconds.to_f,
"play_next" => !rvs.empty? && !plid && params.continue,
"next_video" => rvs.select { |rv| rv["id"]? }[0]?.try &.["id"],
"play_next" => !video.related_videos.empty? && !plid && params.continue,
"next_video" => video.related_videos.select { |rv| rv["id"]? }[0]?.try &.["id"],
"youtube_comments_text" => HTML.escape(translate(locale, "View YouTube comments")),
"reddit_comments_text" => HTML.escape(translate(locale, "View Reddit comments")),
"reddit_permalink_text" => HTML.escape(translate(locale, "View more comments on Reddit")),

@@ -72,13 +72,13 @@
</h3>
<% end %>
<% if !reason.empty? %>
<% if video.reason %>
<h3>
<%= reason %>
<%= video.reason %>
</h3>
<% elsif video.premiere_timestamp %>
<% elsif video.premiere_timestamp.try &.> Time.utc %>
<h3>
<%= translate(locale, "Premieres in `x`", recode_date((video.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %>
<%= video.premiere_timestamp.try { |t| translate(locale, "Premieres in `x`", recode_date((t - Time.utc).ago, locale)) } %>
</h3>
<% end %>
</div>

@@ -137,18 +137,18 @@
<label for="download_widget"><%= translate(locale, "Download as: ") %></label>
<select style="width:100%" name="download_widget" id="download_widget">
<% fmt_stream.each do |option| %>
<option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["type"].split(";")[0].split("/")[1] %>"}'>
<%= itag_to_metadata?(option["itag"]).try &.["height"]? || "~240" %>p - <%= option["type"].split(";")[0] %>
<option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
<%= itag_to_metadata?(option["itag"]).try &.["height"]? || "~240" %>p - <%= option["mimeType"].as_s.split(";")[0] %>
</option>
<% end %>
<% video_streams.each do |option| %>
<option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["type"].split(";")[0].split("/")[1] %>"}'>
<%= option["quality_label"] %> - <%= option["type"].split(";")[0] %> @ <%= option["fps"] %>fps - video only
<option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
<%= option["qualityLabel"] %> - <%= option["mimeType"].as_s.split(";")[0] %> @ <%= option["fps"] %>fps - video only
</option>
<% end %>
<% audio_streams.each do |option| %>
<option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["type"].split(";")[0].split("/")[1] %>"}'>
<%= option["type"].split(";")[0] %> @ <%= option["bitrate"] %>k - audio only
<option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
<%= option["mimeType"].as_s.split(";")[0] %> @ <%= option["bitrate"]?.try &.as_i./ 1000 %>k - audio only
</option>
<% end %>
<% captions.each do |caption| %>
@@ -169,19 +169,19 @@
<p id="likes"><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
<p id="dislikes"><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
<p id="genre"><%= translate(locale, "Genre: ") %>
<% if video.genre_url.empty? %>
<% if !video.genre_url %>
<%= video.genre %>
<% else %>
<a href="<%= video.genre_url %>"><%= video.genre %></a>
<% end %>
</p>
<% if !video.license.empty? %>
<% if video.license %>
<p id="license"><%= translate(locale, "License: ") %><%= video.license %></p>
<% end %>
<p id="family_friendly"><%= translate(locale, "Family friendly? ") %><%= translate_bool(locale, video.is_family_friendly) %></p>
<p id="wilson"><%= translate(locale, "Wilson score: ") %><%= video.wilson_score.round(4) %></p>
<p id="rating"><%= translate(locale, "Rating: ") %><%= rating.round(4) %> / 5</p>
<p id="engagement"><%= translate(locale, "Engagement: ") %><%= engagement.round(2) %>%</p>
<p id="wilson"><%= translate(locale, "Wilson score: ") %><%= video.wilson_score %></p>
<p id="rating"><%= translate(locale, "Rating: ") %><%= video.average_rating %> / 5</p>
<p id="engagement"><%= translate(locale, "Engagement: ") %><%= video.engagement %>%</p>
<% if video.allowed_regions.size != REGIONS.size %>
<p id="allowed_regions">
<% if video.allowed_regions.size < REGIONS.size // 2 %>

@@ -198,7 +198,9 @@
<div class="h-box">
<a href="/channel/<%= video.ucid %>" style="display:block;width:fit-content;width:-moz-fit-content">
<div class="channel-profile">
<img src="/ggpht<%= URI.parse(video.author_thumbnail).full_path %>">
<% if !video.author_thumbnail.empty? %>
<img src="/ggpht<%= URI.parse(video.author_thumbnail).full_path %>">
<% end %>
<span id="channel-name"><%= video.author %></span>
</div>
</a>

@@ -209,8 +211,8 @@
<%= rendered "components/subscribe_widget" %>
<p id="published-date">
<% if video.premiere_timestamp %>
<b><%= translate(locale, "Premieres `x`", video.premiere_timestamp.not_nil!.to_s("%B %-d, %R UTC")) %></b>
<% if video.premiere_timestamp.try &.> Time.utc %>
<b><%= video.premiere_timestamp.try { |t| translate(locale, "Premieres `x`", t.to_s("%B %-d, %R UTC")) } %></b>
<% else %>
<b><%= translate(locale, "Shared `x`", video.published.to_s("%B %-d, %Y")) %></b>
<% end %>

@@ -244,7 +246,7 @@
<% if params.related_videos %>
<div class="h-box">
<% if !rvs.empty? %>
<% if !video.related_videos.empty? %>
<div <% if plid %>style="display:none"<% end %>>
<div class="pure-control-group">
<label for="continue"><%= translate(locale, "Play next by default: ") %></label>

@@ -254,7 +256,7 @@
</div>
<% end %>
<% rvs.each do |rv| %>
<% video.related_videos.each do |rv| %>
<% if rv["id"]? %>
<a href="/watch?v=<%= rv["id"] %>">
<% if !env.get("preferences").as(Preferences).thin_mode %>

@@ -267,15 +269,17 @@
<h5 class="pure-g">
<div class="pure-u-14-24">
<% if rv["ucid"]? %>
<b style="width:100%"><a href="/channel/<%= rv["ucid"] %>"><%= rv["author"] %></a></b>
<b style="width:100%"><a href="/channel/<%= rv["ucid"] %>"><%= rv["author"]? %></a></b>
<% else %>
<b style="width:100%"><%= rv["author"] %></b>
<b style="width:100%"><%= rv["author"]? %></b>
<% end %>
</div>
<div class="pure-u-10-24" style="text-align:right">
<% if views = rv["short_view_count_text"]?.try &.delete(", views watching") %>
<b class="width:100%"><%= translate(locale, "`x` views", views) %></b>
<% if !views.empty? %>
<b class="width:100%"><%= translate(locale, "`x` views", views) %></b>
<% end %>
<% end %>
</div>
</h5>