1
0

fix:pbar total

This commit is contained in:
guorong.zheng 2024-09-05 10:35:12 +08:00
parent f0ff80ffc5
commit 4f9319c86e
2 changed files with 11 additions and 56 deletions

51
main.py

@@ -26,8 +26,6 @@ from flask import Flask, render_template_string
import sys
import shutil
# from collections import defaultdict
app = Flask(__name__)
@@ -110,9 +108,9 @@ class UpdateSource:
int((self.pbar.n) / self.total) * 100,
)
def get_urls_len(self):
def get_urls_len(self, filter=False):
def process_cache_url(url):
if "$cache:" in url:
if filter and "$cache:" in url:
cache_part = url.split("$cache:", 1)[1]
return cache_part.split("?")[0]
return url
@@ -147,7 +145,7 @@ class UpdateSource:
self.subscribe_result,
self.online_search_result,
)
self.total = self.get_urls_len()
self.total = self.get_urls_len(filter=True)
sort_callback = lambda: self.pbar_update(name="测速")
if config.getboolean("Settings", "open_sort"):
self.update_progress(
@@ -160,48 +158,7 @@ class UpdateSource:
self.channel_data,
callback=sort_callback,
)
# no_result_cate_names = [
# (cate, name)
# for cate, channel_obj in self.channel_data.items()
# for name, info_list in channel_obj.items()
# if len(info_list) < 3
# ]
# no_result_names = [name for (_, name) in no_result_cate_names]
# if no_result_names:
# print(
# f"Not enough url found for {', '.join(no_result_names)}, try a supplementary multicast search..."
# )
# sup_results = await get_channels_by_multicast(
# no_result_names, self.update_progress
# )
# sup_channel_items = defaultdict(lambda: defaultdict(list))
# for cate, name in no_result_cate_names:
# data = sup_results.get(name)
# if data:
# sup_channel_items[cate][name] = data
# self.total = len(
# [
# url
# for obj in sup_channel_items.values()
# for url_list in obj.values()
# for url in url_list
# ]
# )
# if self.total > 0 and config.getboolean("Settings", "open_sort"):
# self.update_progress(
# f"正在对补充频道测速排序, 共{len([name for obj in sup_channel_items.values() for name in obj.keys()])}个频道, 含{self.total}个接口",
# 0,
# )
# self.start_time = time()
# self.pbar = tqdm_asyncio(total=self.total, desc="Sorting")
# sup_channel_items = await process_sort_channel_list(
# sup_channel_items,
# callback=sort_callback,
# )
# self.channel_data = merge_objects(
# self.channel_data, sup_channel_items
# )
# self.total = self.get_urls_len()
self.total = self.get_urls_len()
self.pbar = tqdm(total=self.total, desc="Writing")
self.start_time = time()
write_channel_to_file(

@@ -122,17 +122,15 @@ async def get_speed_by_info(url_info, ffmpeg, semaphore, callback=None):
async with semaphore:
url, _, _ = url_info
url_info = list(url_info)
url_split = None
cache_key = None
if "$" in url:
url_split = url.split("$", 1)
url = url_split[0]
url, cache_info = url.split("$", 1)
if "cache:" in cache_info:
cache_key = cache_info.replace("cache:", "")
if cache_key in speed_cache:
return tuple(url_info), speed_cache[cache_key]
url = quote(url, safe=":/?&=$[]")
url_info[0] = url
if url_split and url_split[1] and "cache:" in url_split[1]:
cache_key = url_split[1].replace("cache:", "")
if speed_cache.get(cache_key):
return (tuple(url_info), speed_cache[cache_key])
try:
if ".m3u8" not in url and ffmpeg:
speed = await check_stream_speed(url_info)
@ -144,13 +142,13 @@ async def get_speed_by_info(url_info, ffmpeg, semaphore, callback=None):
if url_speed != float("inf")
else float("inf")
)
if cache_key and speed_cache.get(cache_key) is None:
if cache_key and cache_key not in speed_cache:
speed_cache[cache_key] = url_speed
return speed
except Exception:
return float("inf")
finally:
if callback:
if callback and (cache_key is None or cache_key not in speed_cache):
callback()