commit 83022a5f45
@@ -9,7 +9,7 @@ online_search_page_num = 3
 urls_limit = 15
 open_keep_all = False
 open_sort = True
-open_ffmpeg = False
+open_ffmpeg = True
 response_time_weight = 0.5
 resolution_weight = 0.5
 recent_days = 30
@@ -25,8 +25,8 @@ multicast_page_num = 3
 open_proxy = False
 open_driver = True
 open_hotel = True
-open_hotel_tonkiang = False
+open_hotel_tonkiang = True
 open_hotel_fofa = True
-hotel_region_list = 广东
-hotel_page_num = 5
+hotel_region_list = 全部
+hotel_page_num = 3
 
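Note: the config hunks above enable FFmpeg-based speed testing (open_ffmpeg), turn on the Tonkiang hotel source (open_hotel_tonkiang), widen hotel_region_list from 广东 (Guangdong) to 全部 (all regions), and lower hotel_page_num from 5 to 3. These keys are read through configparser elsewhere in the diff (config.getboolean("Settings", ...)). A minimal sketch of how such a [Settings] section would be consumed; the file path here is an assumption for illustration, not taken from this commit:

    from configparser import ConfigParser

    config = ConfigParser()
    config.read("config/config.ini", encoding="utf-8")  # assumed path, illustration only

    open_ffmpeg = config.getboolean("Settings", "open_ffmpeg")   # True after this commit
    hotel_regions = config.get("Settings", "hotel_region_list")  # "全部"
    hotel_pages = config.getint("Settings", "hotel_page_num")    # 3
    print(open_ffmpeg, hotel_regions, hotel_pages)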
main.py (53 lines changed)
@@ -60,10 +60,10 @@ class UpdateSource:
         self.run_ui = False
         self.tasks = []
         self.channel_items = {}
-        self.subscribe_result = {}
-        self.multicast_result = {}
-        self.hotel_tonkiang_result = {}
         self.hotel_fofa_result = {}
+        self.hotel_tonkiang_result = {}
+        self.multicast_result = {}
+        self.subscribe_result = {}
         self.online_search_result = {}
         self.channel_data = {}
         self.pbar = None
@@ -73,10 +73,10 @@ class UpdateSource:
 
     async def visit_page(self, channel_names=None):
         tasks_config = [
-            ("open_subscribe", get_channels_by_subscribe_urls, "subscribe_result"),
+            ("open_hotel_fofa", get_channels_by_fofa, "hotel_fofa_result"),
             ("open_multicast", get_channels_by_multicast, "multicast_result"),
             ("open_hotel_tonkiang", get_channels_by_hotel, "hotel_tonkiang_result"),
-            ("open_hotel_fofa", get_channels_by_fofa, "hotel_fofa_result"),
+            ("open_subscribe", get_channels_by_subscribe_urls, "subscribe_result"),
             (
                 "open_online_search",
                 get_channels_by_online_search,
@@ -93,7 +93,7 @@ class UpdateSource:
                 if setting == "open_subscribe":
                     subscribe_urls = config.get("Settings", "subscribe_urls").split(",")
                     task = asyncio.create_task(
-                        task_func(urls=subscribe_urls, callback=self.update_progress)
+                        task_func(subscribe_urls, callback=self.update_progress)
                     )
                 elif setting == "open_hotel_tonkiang" or setting == "open_hotel_fofa":
                     task = asyncio.create_task(task_func(self.update_progress))
@@ -108,7 +108,7 @@ class UpdateSource:
         if not n:
             self.pbar.update()
         self.update_progress(
-            f"正在进行{name}, 剩余{self.total - (n or self.pbar.n)}个频道, 预计剩余时间: {get_pbar_remaining(n=(n or self.pbar.n), total=self.total, start_time=self.start_time)}",
+            f"正在进行{name}, 剩余{self.total - (n or self.pbar.n)}个接口, 预计剩余时间: {get_pbar_remaining(n=(n or self.pbar.n), total=self.total, start_time=self.start_time)}",
             int(((n or self.pbar.n) / self.total) * 100),
         )
 
@@ -126,42 +126,48 @@ class UpdateSource:
             for channel_obj in self.channel_items.values()
             for name in channel_obj.keys()
         ]
         self.total = len(channel_names)
         await self.visit_page(channel_names)
         self.tasks = []
         channel_items_obj_items = self.channel_items.items()
         self.channel_data = append_total_data(
             channel_items_obj_items,
             self.channel_data,
-            self.subscribe_result,
+            self.hotel_fofa_result,
             self.multicast_result,
             self.hotel_tonkiang_result,
-            self.hotel_fofa_result,
+            self.subscribe_result,
             self.online_search_result,
         )
+        channel_urls = [
+            url
+            for channel_obj in self.channel_data.values()
+            for url_list in channel_obj.values()
+            for url in url_list
+        ]
+        self.total = len(channel_urls)
         if config.getboolean("Settings", "open_sort"):
             self.update_progress(
-                f"正在测速排序, 共{self.total}个频道",
+                f"正在测速排序, 共{self.total}个接口",
                 0,
             )
             self.start_time = time()
             self.pbar = tqdm_asyncio(total=self.total, desc="Sorting")
             self.sort_n = 0
             self.channel_data = await process_sort_channel_list(
-                data=self.channel_data, callback=self.sort_pbar_update
+                self.channel_data, callback=self.sort_pbar_update
             )
             no_result_cate_names = [
                 (cate, name)
                 for cate, channel_obj in self.channel_data.items()
                 for name, info_list in channel_obj.items()
-                if not info_list
+                if len(info_list) < 3
             ]
             no_result_names = [name for (_, name) in no_result_cate_names]
             if no_result_names:
                 print(
-                    f"No result found for {', '.join(no_result_names)}, try a supplementary online search..."
+                    f"Not enough url found for {', '.join(no_result_names)}, try a supplementary multicast search..."
                 )
-                sup_results = await get_channels_by_online_search(
+                sup_results = await get_channels_by_multicast(
                     no_result_names, self.update_progress
                 )
                 sup_channel_items = defaultdict(lambda: defaultdict(list))
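Note: progress totals in this hunk switch from counting channel names (频道, channels) to counting urls (接口, interfaces): channel_urls flattens every url of every channel in channel_data, and the supplementary pass now triggers for channels holding fewer than 3 urls and fetches them via multicast search instead of online search. A small illustration of the flattening and the threshold check; the sample data is invented and simplified to bare url strings:

    channel_data = {
        "央视": {"CCTV1": ["http://a/1", "http://a/2"], "CCTV2": []},
        "卫视": {"湖南卫视": ["http://b/1"]},
    }

    channel_urls = [
        url
        for channel_obj in channel_data.values()
        for url_list in channel_obj.values()
        for url in url_list
    ]
    print(len(channel_urls))  # 3 urls across 3 channels

    # channels still below the 3-url threshold get a supplementary multicast search
    no_result_names = [
        name
        for channel_obj in channel_data.values()
        for name, info_list in channel_obj.items()
        if len(info_list) < 3
    ]
    print(no_result_names)  # ['CCTV1', 'CCTV2', '湖南卫视']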
@@ -170,29 +176,34 @@ class UpdateSource:
                         if data:
                             sup_channel_items[cate][name] = data
                 self.total = len(
-                    [name for obj in sup_channel_items.values() for name in obj.keys()]
+                    [
+                        url
+                        for obj in sup_channel_items.values()
+                        for url_list in obj.values()
+                        for url in url_list
+                    ]
                 )
                 if self.total > 0 and config.getboolean("Settings", "open_sort"):
                     self.update_progress(
-                        f"正在对补充频道测速排序, 共{self.total}个频道",
+                        f"正在对补充频道测速排序, 共{len([name for obj in sup_channel_items.values() for name in obj.keys()])}个频道, 含{self.total}个接口",
                         0,
                     )
                     self.start_time = time()
                     self.pbar = tqdm_asyncio(total=self.total, desc="Sorting")
                     self.sort_n = 0
                     sup_channel_items = await process_sort_channel_list(
-                        data=sup_channel_items,
+                        sup_channel_items,
                         callback=self.sort_pbar_update,
                     )
                     self.channel_data = merge_objects(
                         self.channel_data, sup_channel_items
                     )
-        self.total = len(channel_names)
+        self.total = len(channel_urls)
         self.pbar = tqdm(total=self.total, desc="Writing")
         self.start_time = time()
         write_channel_to_file(
-            items=channel_items_obj_items,
-            data=self.channel_data,
+            channel_items_obj_items,
+            self.channel_data,
             callback=lambda: self.pbar_update(name="写入结果"),
         )
         self.pbar.close()
@@ -125,7 +125,7 @@ async def get_multicast_region_result():
     """
     multicast_region_urls_info = get_multicast_urls_info_from_region_list()
    multicast_result = await get_channels_by_subscribe_urls(
-        urls=multicast_region_urls_info, multicast=True
+        multicast_region_urls_info, multicast=True
    )
    with open(
        resource_path("updates/multicast/multicast_region_result.json"),
@@ -13,7 +13,7 @@ timeout = 30
 
 
 async def get_channels_by_subscribe_urls(
-    urls=None, multicast=False, retry=True, error_print=True, callback=None
+    urls, multicast=False, retry=True, error_print=True, callback=None
 ):
     """
     Get the channels by subscribe urls
@@ -106,6 +106,8 @@ def format_channel_name(name):
     """
     if config.getboolean("Settings", "open_keep_all"):
         return name
+    cc = OpenCC("t2s")
+    name = cc.convert(name)
     sub_pattern = (
         r"-|_|\((.*?)\)|\[(.*?)\]| |频道|标清|高清|HD|hd|超清|超高|超高清|中央|央视|台"
     )
@@ -150,10 +152,9 @@ def channel_name_is_equal(name1, name2):
     """
     if config.getboolean("Settings", "open_keep_all"):
         return True
-    cc = OpenCC("t2s")
-    name1_converted = cc.convert(format_channel_name(name1))
-    name2_converted = cc.convert(format_channel_name(name2))
-    return name1_converted == name2_converted
+    name1_format = format_channel_name(name1)
+    name2_format = format_channel_name(name2)
+    return name1_format == name2_format
 
 
 def get_channel_results_by_name(name, data):
@@ -161,12 +162,10 @@ def get_channel_results_by_name(name, data):
     Get channel results from data by name
     """
     format_name = format_channel_name(name)
-    cc1 = OpenCC("s2t")
-    converted1 = cc1.convert(format_name)
-    cc2 = OpenCC("t2s")
-    converted2 = cc2.convert(format_name)
-    result1 = data.get(converted1, [])
-    result2 = data.get(converted2, [])
+    cc = OpenCC("s2t")
+    name_s2t = cc.convert(format_name)
+    result1 = data.get(format_name, [])
+    result2 = data.get(name_s2t, [])
     results = list(dict.fromkeys(result1 + result2))
     return results
 
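Note: the traditional/simplified handling is consolidated here. format_channel_name now performs the t2s (traditional-to-simplified) conversion itself, so channel_name_is_equal simply compares the two formatted names, and get_channel_results_by_name looks up both the formatted key and its s2t (simplified-to-traditional) variant. A quick OpenCC illustration, with arbitrary example strings:

    from opencc import OpenCC

    t2s = OpenCC("t2s")  # traditional -> simplified
    s2t = OpenCC("s2t")  # simplified -> traditional

    print(t2s.convert("鳳凰衛視"))  # 凤凰卫视
    print(s2t.convert("凤凰卫视"))  # 鳳凰衛視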
@@ -394,7 +393,7 @@ def get_results_from_multicast_soup_requests(soup, hotel=False):
     return results
 
 
-def update_channel_urls_txt(cate, name, urls):
+def update_channel_urls_txt(cate, name, urls, callback=None):
     """
     Update the category and channel urls to the final file
     """
@@ -413,6 +412,8 @@ def update_channel_urls_txt(cate, name, urls):
         for url in urls:
             if url is not None:
                 f.write(name + "," + url + "\n")
+                if callback:
+                    callback()
 
 
 def get_channel_url(text):
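Note: update_channel_urls_txt gains an optional callback that fires once per url written. Combined with the main.py change that builds the "Writing" progress bar from len(channel_urls), the bar now advances per interface rather than per channel. A minimal sketch of that wiring; the helper below is a stand-in, not the project's function:

    from tqdm import tqdm

    urls = ["http://a/1", "http://a/2", "http://b/1"]
    pbar = tqdm(total=len(urls), desc="Writing")

    def write_urls(name, urls, callback=None):
        # stand-in for update_channel_urls_txt: one callback tick per written url
        for url in urls:
            # ... write f"{name},{url}" to the output file here ...
            if callback:
                callback()

    write_urls("CCTV1", urls, callback=lambda: pbar.update())
    pbar.close()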
@@ -497,10 +498,10 @@ def append_total_data(*args, **kwargs):
 def append_all_method_data(
     items,
     data,
-    subscribe_result=None,
+    hotel_fofa_result=None,
     multicast_result=None,
     hotel_tonkiang_result=None,
-    hotel_fofa_result=None,
+    subscribe_result=None,
     online_search_result=None,
 ):
     """
@@ -509,10 +510,10 @@ def append_all_method_data(
     for cate, channel_obj in items:
         for name, old_urls in channel_obj.items():
             for method, result in [
-                ("subscribe", subscribe_result),
+                ("hotel_fofa", hotel_fofa_result),
                 ("multicast", multicast_result),
                 ("hotel_tonkiang", hotel_tonkiang_result),
-                ("hotel_fofa", hotel_fofa_result),
+                ("subscribe", subscribe_result),
                 ("online_search", online_search_result),
             ]:
                 if config.getboolean("Settings", f"open_{method}"):
@@ -554,10 +555,10 @@ def append_all_method_data(
 def append_all_method_data_keep_all(
     items,
     data,
-    subscribe_result=None,
+    hotel_fofa_result=None,
     multicast_result=None,
     hotel_tonkiang_result=None,
-    hotel_fofa_result=None,
+    subscribe_result=None,
     online_search_result=None,
 ):
     """
@@ -565,10 +566,10 @@ def append_all_method_data_keep_all(
     """
     for cate, channel_obj in items:
         for method, result in [
-            ("subscribe", subscribe_result),
+            ("hotel_fofa", hotel_fofa_result),
             ("multicast", multicast_result),
             ("hotel_tonkiang", hotel_tonkiang_result),
-            ("hotel_fofa", hotel_fofa_result),
+            ("subscribe", subscribe_result),
             ("online_search", online_search_result),
         ]:
             if result and config.getboolean("Settings", f"open_{method}"):
@@ -592,7 +593,7 @@ def append_all_method_data_keep_all(
 
 
 async def sort_channel_list(
-    semaphore=None, cate=None, name=None, info_list=None, ffmpeg=False, callback=None
+    cate, name, info_list, semaphore, ffmpeg=False, callback=None
 ):
     """
     Sort the channel list
@@ -602,7 +603,7 @@ async def sort_channel_list(
         try:
             if info_list:
                 sorted_data = await sort_urls_by_speed_and_resolution(
-                    data=info_list, ffmpeg=ffmpeg
+                    info_list, ffmpeg=ffmpeg, callback=callback
                 )
                 if sorted_data:
                     for (
@@ -620,12 +621,10 @@ async def sort_channel_list(
         except Exception as e:
             logging.error(f"Error: {e}")
-        finally:
-            if callback:
-                callback()
         return {"cate": cate, "name": name, "data": data}
 
 
-async def process_sort_channel_list(data=None, callback=None):
+async def process_sort_channel_list(data, callback=None):
     """
     Processs the sort channel list
     """
@@ -634,14 +633,14 @@ async def process_sort_channel_list(data=None, callback=None):
     if open_ffmpeg and not ffmpeg_installed:
         print("FFmpeg is not installed, using requests for sorting.")
     is_ffmpeg = open_ffmpeg and ffmpeg_installed
-    semaphore = asyncio.Semaphore(1 if is_ffmpeg else 100)
+    semaphore = asyncio.Semaphore(3)
     tasks = [
         asyncio.create_task(
             sort_channel_list(
-                semaphore=semaphore,
-                cate=cate,
-                name=name,
-                info_list=info_list,
+                cate,
+                name,
+                info_list,
+                semaphore,
                 ffmpeg=is_ffmpeg,
                 callback=callback,
             )
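Note: sorting concurrency becomes a fixed asyncio.Semaphore(3) created in process_sort_channel_list and passed positionally into every sort_channel_list task (previously one task at a time with FFmpeg, 100 otherwise). The per-channel callback that used to fire in sort_channel_list's finally block is dropped because progress now ticks per url inside get_speed_by_info (see the speed hunks below). A minimal sketch of the bounded-concurrency pattern, not the project's exact code, with a sleep standing in for the real speed test:

    import asyncio

    async def sort_one(cate, name, semaphore):
        async with semaphore:            # at most 3 channels sorted concurrently
            await asyncio.sleep(0.1)     # placeholder for the real per-channel work
            return {"cate": cate, "name": name}

    async def main():
        semaphore = asyncio.Semaphore(3)
        tasks = [
            asyncio.create_task(sort_one("央视", f"CCTV{i}", semaphore))
            for i in range(1, 6)
        ]
        print(await asyncio.gather(*tasks))

    asyncio.run(main())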
@@ -660,20 +659,16 @@ async def process_sort_channel_list(data=None, callback=None):
     return data
 
 
-def write_channel_to_file(items=None, data=None, callback=None):
+def write_channel_to_file(items, data, callback=None):
     """
     Write channel to file
     """
     for cate, channel_obj in items:
         for name in channel_obj.keys():
             info_list = data.get(cate, {}).get(name, [])
-            try:
-                channel_urls = get_total_urls_from_info_list(info_list)
-                print("write:", cate, name, "num:", len(channel_urls))
-                update_channel_urls_txt(cate, name, channel_urls)
-            finally:
-                if callback:
-                    callback()
+            channel_urls = get_total_urls_from_info_list(info_list)
+            print("write:", cate, name, "num:", len(channel_urls))
+            update_channel_urls_txt(cate, name, channel_urls, callback=callback)
     for handler in logging.root.handlers[:]:
         handler.close()
         logging.root.removeHandler(handler)
@@ -43,7 +43,7 @@ def is_ffmpeg_installed():
         return False
 
 
-async def ffmpeg_url(url, timeout=timeout):
+async def ffmpeg_url(url):
     """
     Get url info by ffmpeg
     """
@@ -97,7 +97,7 @@ async def check_stream_speed(url_info):
     """
     try:
         url = url_info[0]
-        video_info = await ffmpeg_url(url, timeout=timeout)
+        video_info = await ffmpeg_url(url)
         if video_info is None:
             return float("inf")
         frame, resolution = get_video_info(video_info)
@@ -112,37 +112,45 @@ async def check_stream_speed(url_info):
         return float("inf")
 
 
-async def get_info_with_speed(url_info):
+async def get_speed_by_info(url_info, ffmpeg, semaphore, callback=None):
     """
     Get the info with speed
     """
-    url, _, _ = url_info
-    url_info = list(url_info)
-    if "$" in url:
-        url = url.split("$")[0]
-    url = quote(url, safe=":/?&=$[]")
-    url_info[0] = url
-    try:
-        speed = await check_stream_speed(url_info)
-        return speed
-    except Exception:
-        return float("inf")
+    async with semaphore:
+        url, _, _ = url_info
+        url_info = list(url_info)
+        if "$" in url:
+            url = url.split("$")[0]
+        url = quote(url, safe=":/?&=$[]")
+        url_info[0] = url
+        try:
+            if ".m3u8" not in url and ffmpeg:
+                speed = await check_stream_speed(url_info)
+                return speed
+            else:
+                speed = await get_speed(url)
+                return (
+                    (tuple(url_info), speed) if speed != float("inf") else float("inf")
+                )
+        except Exception:
+            return float("inf")
+        finally:
+            if callback:
+                callback()
 
 
-async def sort_urls_by_speed_and_resolution(data=None, ffmpeg=False):
+async def sort_urls_by_speed_and_resolution(data, ffmpeg=False, callback=None):
     """
     Sort by speed and resolution
     """
-    if ffmpeg:
-        response = await asyncio.gather(
-            *(get_info_with_speed(url_info) for url_info in data)
+    semaphore = asyncio.Semaphore(10)
+    response = await asyncio.gather(
+        *(
+            get_speed_by_info(url_info, ffmpeg, semaphore, callback=callback)
+            for url_info in data
         )
-        valid_response = [res for res in response if res != float("inf")]
-    else:
-        response_times = await asyncio.gather(*(get_speed(url) for url, _, _ in data))
-        valid_response = [
-            (info, rt) for info, rt in zip(data, response_times) if rt != float("inf")
-        ]
+    )
+    valid_response = [res for res in response if res != float("inf")]
 
     def extract_resolution(resolution_str):
         numbers = re.findall(r"\d+x\d+", resolution_str)
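Note: get_speed_by_info replaces get_info_with_speed. It acquires a slot from a Semaphore(10) created in sort_urls_by_speed_and_resolution, probes non-m3u8 urls with FFmpeg when it is enabled, falls back to plain request timing via get_speed otherwise, returns a (url_info, speed) tuple for reachable urls, and always fires the progress callback in finally. The sorter then gathers one such coroutine per url. A stripped-down sketch of that gather-under-semaphore shape, with a dummy probe in place of the real checks:

    import asyncio, random

    async def speed_by_info(url_info, semaphore, callback=None):
        async with semaphore:               # at most 10 concurrent checks
            try:
                speed = random.random()     # dummy stand-in for the real probe
                return (tuple(url_info), speed)
            except Exception:
                return float("inf")
            finally:
                if callback:
                    callback()

    async def main():
        data = [("http://a/1", None, None), ("http://a/2", None, None)]
        semaphore = asyncio.Semaphore(10)
        response = await asyncio.gather(
            *(speed_by_info(info, semaphore, callback=lambda: None) for info in data)
        )
        print([res for res in response if res != float("inf")])

    asyncio.run(main())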
|
Loading…
x
Reference in New Issue
Block a user