Search/Trending: Fix duplicated results (#3773)
commit eefc8bbbdd
@@ -10,7 +10,7 @@ module Invidious::Search
       initial_data = YoutubeAPI.search(query.text, search_params, client_config: client_config)
 
       items, _ = extract_items(initial_data)
-      return items
+      return items.reject!(Category)
     end
 
     # Search a youtube channel
@@ -32,7 +32,7 @@ module Invidious::Search
       response_json = YoutubeAPI.browse(continuation)
 
       items, _ = extract_items(response_json, "", ucid)
-      return items
+      return items.reject!(Category)
     end
 
     # Search inside of user subscriptions
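Both hunks above swap `return items` for `return items.reject!(Category)`. Crystal's `Array#reject!(pattern)` removes, in place, every element for which `pattern === element` holds, so passing a class drops all instances of that class. A minimal stand-alone sketch with stand-in `Video`/`Category` classes (not the Invidious types):

    class Video; end
    class Category; end

    items = [Video.new, Category.new, Video.new] of Video | Category

    # Passing a class uses `Category === element` (element.is_a?(Category)),
    # so every Category entry is removed in place and the array is returned.
    items.reject!(Category)

    puts items.size # => 2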
@@ -113,7 +113,7 @@ module Invidious::Search
 
       case @type
       when .regular?, .playlist?
-        items = unnest_items(Processors.regular(self))
+        items = Processors.regular(self)
       #
       when .channel?
         items = Processors.channel(self)
@@ -136,26 +136,5 @@ module Invidious::Search
 
       return params
     end
-
-    # TODO: clean code
-    private def unnest_items(all_items) : Array(SearchItem)
-      items = [] of SearchItem
-
-      # Light processing to flatten search results out of Categories.
-      # They should ideally be supported in the future.
-      all_items.each do |i|
-        if i.is_a? Category
-          i.contents.each do |nest_i|
-            if !nest_i.is_a? Video
-              items << nest_i
-            end
-          end
-        else
-          items << i
-        end
-      end
-
-      return items
-    end
   end
 end
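For context, the `unnest_items` helper removed above flattened each Category into the surrounding result list while skipping the Videos nested inside it; the `reject!(Category)` calls now take over that role by discarding categories outright. A rough, self-contained restatement under stand-in types (`Video`, `Channel`, `Item` are placeholders, not the Invidious classes):

    class Video; end
    class Channel; end

    # Stand-in for Invidious' Category, which wraps other result items.
    record Category, contents : Array(Video | Channel)

    alias Item = Video | Channel | Category

    def unnest(all_items : Array(Item)) : Array(Item)
      items = [] of Item

      all_items.each do |i|
        if i.is_a?(Category)
          # Keep nested non-Video entries; nested Videos were skipped,
          # presumably because they already appear elsewhere in the results.
          i.contents.each { |nested| items << nested unless nested.is_a?(Video) }
        else
          items << i
        end
      end

      items
    end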
@@ -17,7 +17,9 @@ def fetch_trending(trending_type, region, locale)
 
   client_config = YoutubeAPI::ClientConfig.new(region: region)
   initial_data = YoutubeAPI.browse("FEtrending", params: params, client_config: client_config)
-  trending = extract_videos(initial_data)
+
+  items, _ = extract_items(initial_data)
 
-  return {trending, plid}
+  # Return items, but ignore categories (e.g featured content)
+  return items.reject!(Category), plid
 end
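In the trending hunk, `return items.reject!(Category), plid` hands back two values at once: in Crystal, `return a, b` builds a Tuple, mirroring the old `return {trending, plid}` tuple literal, and the caller unpacks it with multiple assignment. A small sketch of the pattern (illustrative names, not the Invidious API):

    def fetch_example : {Array(String), String?}
      items = ["a", "b", "c"]
      playlist_id : String? = nil

      # `return x, y` is equivalent to `return {x, y}`.
      return items, playlist_id
    end

    items, plid = fetch_example
    puts items.size # => 3
    puts plid.nil?  # => true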
@@ -68,19 +68,16 @@ rescue ex
   return false
 end
 
-def extract_videos(initial_data : Hash(String, JSON::Any), author_fallback : String? = nil, author_id_fallback : String? = nil) : Array(SearchVideo)
-  extracted, _ = extract_items(initial_data, author_fallback, author_id_fallback)
-
-  target = [] of (SearchItem | Continuation)
-  extracted.each do |i|
-    if i.is_a?(Category)
-      i.contents.each { |cate_i| target << cate_i if !cate_i.is_a? Video }
-    else
-      target << i
-    end
-  end
-
-  return target.select(SearchVideo)
-end
+# This function extracts the SearchItems from a Category.
+# Categories are commonly returned in search results and trending pages.
+def extract_category(category : Category) : Array(SearchItem)
+  items = [] of SearchItem
+
+  category.contents.each do |item|
+    items << item if item.is_a?(SearchItem)
+  end
+
+  return items
+end
 
 def extract_selected_tab(tabs)
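The removed `extract_videos` narrowed its mixed list with `target.select(SearchVideo)`, the keep-only counterpart of the `reject!(Category)` calls above: Crystal's `select`/`reject` accept a class and match elements via `===`. A stand-alone sketch with placeholder classes (not the real Invidious types):

    class SearchVideo; end
    class SearchChannel; end

    mixed = [SearchVideo.new, SearchChannel.new, SearchVideo.new] of SearchVideo | SearchChannel

    # select with a class keeps only the matching elements
    # (`SearchVideo === e`), the inverse of reject/reject! above.
    videos = mixed.select(SearchVideo)

    puts videos.size # => 2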