require "lsquic"
require "db"

def add_yt_headers(request)
  request.headers["user-agent"] ||= "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36"
  request.headers["accept-charset"] ||= "ISO-8859-1,utf-8;q=0.7,*;q=0.7"
  request.headers["accept"] ||= "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
  request.headers["accept-language"] ||= "en-us,en;q=0.5"

  return if request.resource.starts_with? "/sorry/index"

  request.headers["x-youtube-client-name"] ||= "1"
  request.headers["x-youtube-client-version"] ||= "2.20200609"

  # Preserve original cookies and add new YT consent cookie for EU servers
  request.headers["cookie"] = "#{request.headers["cookie"]?}; CONSENT=YES+"
  if !CONFIG.cookies.empty?
    request.headers["cookie"] = "#{(CONFIG.cookies.map { |c| "#{c.name}=#{c.value}" }).join("; ")}; #{request.headers["cookie"]?}"
  end
end

struct YoutubeConnectionPool
  property! url : URI
  property! capacity : Int32
  property! timeout : Float64
  property pool : DB::Pool(QUIC::Client | HTTP::Client)

  def initialize(url : URI, @capacity = 5, @timeout = 5.0, use_quic = true)
    @url = url
    @pool = build_pool(use_quic)
  end

  def client(region = nil, &block)
    if region
      conn = make_client(url, region)
      response = yield conn
    else
      conn = pool.checkout
      begin
        response = yield conn
      rescue ex
        conn.close
        conn = QUIC::Client.new(url)
        conn.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::INET
        conn.family = Socket::Family::INET if conn.family == Socket::Family::UNSPEC
        conn.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
        response = yield conn
      ensure
        pool.release(conn)
      end
    end

    response
  end

  private def build_pool(use_quic)
    DB::Pool(QUIC::Client | HTTP::Client).new(initial_pool_size: 0, max_pool_size: capacity, max_idle_pool_size: capacity, checkout_timeout: timeout) do
      if use_quic
        conn = QUIC::Client.new(url)
      else
        conn = HTTP::Client.new(url)
      end
      conn.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::INET
      conn.family = Socket::Family::INET if conn.family == Socket::Family::UNSPEC
      conn.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
      conn
    end
  end
end
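
# Usage sketch (illustrative; `YT_POOL` is a hypothetical constant, not
# defined in this file):
#
#   YT_POOL = YoutubeConnectionPool.new(URI.parse("https://www.youtube.com"), capacity: 10)
#   response = YT_POOL.client &.get("/feed/trending")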

# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
def ci_lower_bound(pos, n)
  if n == 0
    return 0.0
  end

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0*pos/n

  return (phat + z*z/(2*n) - z * Math.sqrt((phat*(1 - phat) + z*z/(4*n))/n))/(1 + z*z/n)
end
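
# Example: 90 positive ratings out of 100 gives a Wilson score lower bound of
# roughly 0.826, i.e. we can be 95% confident the "true" positive rate is at
# least that.
#
#   ci_lower_bound(90, 100) # => 0.8256...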

def elapsed_text(elapsed)
  millis = elapsed.total_milliseconds
  return "#{millis.round(2)}ms" if millis >= 1

  "#{(millis * 1000).round(2)}µs"
end
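
# Example:
#
#   elapsed_text(1.5.milliseconds) # => "1.5ms"
#   elapsed_text(250.microseconds) # => "250.0µs"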

def make_client(url : URI, region = nil)
  # TODO: Migrate any applicable endpoints to QUIC
  client = HTTPClient.new(url, OpenSSL::SSL::Context::Client.insecure)
  client.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::UNSPEC
  client.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds

  if region
    PROXY_LIST[region]?.try &.sample(40).each do |proxy|
      begin
        proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
        client.set_proxy(proxy)
        break
      rescue ex
      end
    end
  end

  return client
end

def make_client(url : URI, region = nil, &block)
  client = make_client(url, region)
  begin
    yield client
  ensure
    client.close
  end
end
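
# Usage sketch: the block form closes the client when the block returns.
#
#   body = make_client(URI.parse("https://example.com"), &.get("/").body)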

def decode_length_seconds(string)
  length_seconds = string.gsub(/[^0-9:]/, "").split(":").map &.to_i
  length_seconds = [0] * (3 - length_seconds.size) + length_seconds
  length_seconds = Time::Span.new hours: length_seconds[0], minutes: length_seconds[1], seconds: length_seconds[2]
  length_seconds = length_seconds.total_seconds.to_i

  return length_seconds
end
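
# Example: "HH:MM:SS" or "MM:SS" text becomes a total second count.
#
#   decode_length_seconds("1:02:03") # => 3723
#   decode_length_seconds("4:20")    # => 260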

def recode_length_seconds(time)
  if time <= 0
    return ""
  else
    time = time.seconds
    text = "#{time.minutes.to_s.rjust(2, '0')}:#{time.seconds.to_s.rjust(2, '0')}"

    if time.total_hours.to_i > 0
      text = "#{time.total_hours.to_i.to_s.rjust(2, '0')}:#{text}"
    end

    text = text.lchop('0')

    return text
  end
end
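
# Example: the inverse of `decode_length_seconds`, with any leading zero
# stripped.
#
#   recode_length_seconds(3723) # => "1:02:03"
#   recode_length_seconds(260)  # => "4:20"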

def decode_time(string)
  time = string.try &.to_f?

  if !time
    hours = /(?<hours>\d+)h/.match(string).try &.["hours"].try &.to_f
    hours ||= 0

    minutes = /(?<minutes>\d+)m(?!s)/.match(string).try &.["minutes"].try &.to_f
    minutes ||= 0

    seconds = /(?<seconds>\d+)s/.match(string).try &.["seconds"].try &.to_f
    seconds ||= 0

    millis = /(?<millis>\d+)ms/.match(string).try &.["millis"].try &.to_f
    millis ||= 0

    time = hours * 3600 + minutes * 60 + seconds + millis // 1000
  end

  return time
end
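
# Example: accepts either plain seconds or an "XhYmZs"-style duration.
#
#   decode_time("90")       # => 90.0
#   decode_time("1h20m30s") # => 4830.0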

def decode_date(string : String)
  # String matches 'YYYY'
  if string.match(/^\d{4}/)
    return Time.utc(string.to_i, 1, 1)
  end

  # Try to parse as format Jul 10, 2000
  begin
    return Time.parse(string, "%b %-d, %Y", Time::Location.local)
  rescue ex
  end

  case string
  when "today"
    return Time.utc
  when "yesterday"
    return Time.utc - 1.day
  else nil # Continue
  end

  # String matches format "20 hours ago", "4 months ago"...
  date = string.split(" ")[-3, 3]
  delta = date[0].to_i

  case date[1]
  when .includes? "second"
    delta = delta.seconds
  when .includes? "minute"
    delta = delta.minutes
  when .includes? "hour"
    delta = delta.hours
  when .includes? "day"
    delta = delta.days
  when .includes? "week"
    delta = delta.weeks
  when .includes? "month"
    delta = delta.months
  when .includes? "year"
    delta = delta.years
  else
    raise "Could not parse #{string}"
  end

  return Time.utc - delta
end
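
# Example:
#
#   decode_date("2020")       # => Time.utc(2020, 1, 1)
#   decode_date("2 days ago") # => Time.utc - 2.days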

def recode_date(time : Time, locale)
  span = Time.utc - time

  if span.total_days > 365.0
    span = translate(locale, "`x` years", (span.total_days.to_i // 365).to_s)
  elsif span.total_days > 30.0
    span = translate(locale, "`x` months", (span.total_days.to_i // 30).to_s)
  elsif span.total_days > 7.0
    span = translate(locale, "`x` weeks", (span.total_days.to_i // 7).to_s)
  elsif span.total_hours > 24.0
    span = translate(locale, "`x` days", (span.total_days.to_i).to_s)
  elsif span.total_minutes > 60.0
    span = translate(locale, "`x` hours", (span.total_hours.to_i).to_s)
  elsif span.total_seconds > 60.0
    span = translate(locale, "`x` minutes", (span.total_minutes.to_i).to_s)
  else
    span = translate(locale, "`x` seconds", (span.total_seconds.to_i).to_s)
  end

  return span
end
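
# Example (assuming a locale whose "`x` days" template renders as "3 days"):
#
#   recode_date(Time.utc - 3.days, locale) # => "3 days"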

def number_with_separator(number)
  number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
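
# Example:
#
#   number_with_separator(1234567) # => "1,234,567"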

def short_text_to_number(short_text : String) : Int32
  case short_text
  when .ends_with? "M"
    number = short_text.rstrip(" mM").to_f
    number *= 1000000
  when .ends_with? "K"
    number = short_text.rstrip(" kK").to_f
    number *= 1000
  else
    number = short_text.rstrip(" ")
  end

  number = number.to_i

  return number
end
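
# Example:
#
#   short_text_to_number("1.5M") # => 1500000
#   short_text_to_number("27K")  # => 27000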

def number_to_short_text(number)
  separated = number_with_separator(number).gsub(",", ".").split("")
  text = separated.first(2).join

  if separated[2]? && separated[2] != "."
    text += separated[2]
  end

  text = text.rchop(".0")

  if number // 1_000_000_000 != 0
    text += "B"
  elsif number // 1_000_000 != 0
    text += "M"
  elsif number // 1000 != 0
    text += "K"
  end

  text
end
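
# Example: the rough inverse of `short_text_to_number`.
#
#   number_to_short_text(1_500_000) # => "1.5M"
#   number_to_short_text(27_423)    # => "27K"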

def arg_array(array, start = 1)
  if array.size == 0
    args = "NULL"
  else
    args = [] of String
    (start..array.size + start - 1).each { |i| args << "($#{i})" }
    args = args.join(",")
  end

  return args
end
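
# Example: builds the placeholder list for a parameterized SQL query.
#
#   arg_array(["a", "b", "c"])           # => "($1),($2),($3)"
#   arg_array(["a", "b", "c"], start: 2) # => "($2),($3),($4)"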

def make_host_url(kemal_config)
  ssl = CONFIG.https_only || kemal_config.ssl
  port = CONFIG.external_port || kemal_config.port

  if ssl
    scheme = "https://"
  else
    scheme = "http://"
  end

  # Add the port only if it is non-standard
  if port != 80 && port != 443
    port = ":#{port}"
  else
    port = ""
  end

  if !CONFIG.domain
    return ""
  end

  host = CONFIG.domain.not_nil!.lchop(".")

  return "#{scheme}#{host}#{port}"
end
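
# Example sketch (assuming CONFIG.domain is "invidious.example" and the
# instance is served over HTTPS on the standard port):
#
#   make_host_url(Kemal.config) # => "https://invidious.example"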

def get_referer(env, fallback = "/", unroll = true)
  referer = env.params.query["referer"]?
  referer ||= env.request.headers["referer"]?
  referer ||= fallback

  referer = URI.parse(referer)

  # "Unroll" nested referrers
  if unroll
    loop do
      if referer.query
        params = HTTP::Params.parse(referer.query.not_nil!)
        if params["referer"]?
          referer = URI.parse(URI.decode_www_form(params["referer"]))
        else
          break
        end
      else
        break
      end
    end
  end

  referer = referer.request_target
  referer = "/" + referer.gsub(/[^\/?@&%=\-_.0-9a-zA-Z]/, "").lstrip("/\\")

  if referer == env.request.path
    referer = fallback
  end

  return referer
end

def sha256(text)
  digest = OpenSSL::Digest.new("SHA256")
  digest << text
  return digest.final.hexstring
end
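
# Example:
#
#   sha256("abc") # => "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"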

def subscribe_pubsub(topic, key)
  case topic
  when .match(/^UC[A-Za-z0-9_-]{22}$/)
    topic = "channel_id=#{topic}"
  when .match(/^(PL|LL|EC|UU|FL|UL|OLAK5uy_)[0-9A-Za-z-_]{10,}$/)
    # There are a couple of prefixes missing from the above regex,
    # namely TL and RD, which don't have feeds
    topic = "playlist_id=#{topic}"
  else
    # TODO
  end

  time = Time.utc.to_unix.to_s
  nonce = Random::Secure.hex(4)
  signature = "#{time}:#{nonce}"

  body = {
    "hub.callback"      => "#{HOST_URL}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
    "hub.topic"         => "https://www.youtube.com/xml/feeds/videos.xml?#{topic}",
    "hub.verify"        => "async",
    "hub.mode"          => "subscribe",
    "hub.lease_seconds" => "432000",
    "hub.secret"        => key.to_s,
  }

  return make_client(PUBSUB_URL, &.post("/subscribe", form: body))
end

def parse_range(range)
  if !range
    return 0_i64, nil
  end

  ranges = range.lchop("bytes=").split(',')
  ranges.each do |range|
    start_range, end_range = range.split('-')

    start_range = start_range.to_i64? || 0_i64
    end_range = end_range.to_i64?

    return start_range, end_range
  end

  return 0_i64, nil
end
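
# Example: only the first range of a `Range` header is honored.
#
#   parse_range("bytes=200-1023") # => {200_i64, 1023_i64}
#   parse_range("bytes=500-")     # => {500_i64, nil}
#   parse_range(nil)              # => {0_i64, nil}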

def convert_theme(theme)
  case theme
  when "true"
    "dark"
  when "false"
    "light"
  when "", nil
    nil
  else
    theme
  end
end
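
# Example: maps legacy boolean dark-mode values onto theme names.
#
#   convert_theme("true")  # => "dark"
#   convert_theme("light") # => "light"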

def fetch_random_instance
  begin
    instance_api_client = HTTP::Client.new(URI.parse("https://api.invidious.io"))

    # Timeouts
    instance_api_client.connect_timeout = 10.seconds
    instance_api_client.dns_timeout = 10.seconds

    instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
    instance_api_client.close
  rescue Socket::ConnectError | IO::TimeoutError | JSON::ParseException
    instance_list = [] of JSON::Any
  end

  filtered_instance_list = [] of String

  instance_list.each do |data|
    # TODO: Check if the current URL is an onion instance and use .onion types if so.
    if data[1]["type"] == "https"
      # Instances can have statistics disabled, but those stats are required for version validation.
      # JSON::Any#as_nil? doesn't exist, so we have to handle the error raised when as_nil fails.
      begin
        data[1]["stats"].as_nil
        next
      rescue TypeCastError
      end

      # The stats endpoint could also lack the software key.
      next if data[1]["stats"]["software"]?.nil?

      # Make sure the instance isn't too outdated.
      if remote_version = data[1]["stats"]?.try &.["software"]?.try &.["version"]
        remote_commit_date = remote_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
        next if !remote_commit_date

        remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
        local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)

        next if (remote_commit_date - local_commit_date).abs.days > 30

        begin
          data[1]["monitor"].as_nil
          health = data[1]["monitor"].as_h["dailyRatios"][0].as_h["ratio"]
          filtered_instance_list << data[0].as_s if health.to_s.to_f > 90
        rescue TypeCastError
          # We can't check the health if the monitoring is broken, so we just add the
          # instance to the list and move on. Ideally we'd ignore any instance with
          # broken health monitoring, but since this error often affects all instances
          # at once, we have to skip the check.
          filtered_instance_list << data[0].as_s
        end
      end
    end
  end

  # If for some reason no instances were fetched successfully, fall back to redirect.invidious.io.
  if filtered_instance_list.size == 0
    return "redirect.invidious.io"
  end

  return filtered_instance_list.sample(1)[0]
end