chore:url cache

parent 831bd4b5f9
commit fdb54edf5f
@@ -53,12 +53,13 @@ async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
     proxy = None
     open_proxy = config.getboolean("Settings", "open_proxy")
     open_driver = config.getboolean("Settings", "open_driver")
+    open_sort = config.getboolean("Settings", "open_sort")
     if open_proxy:
         test_url = fofa_urls[0][0] if multicast else fofa_urls[0]
         proxy = await get_proxy(test_url, best=True, with_test=True)

     def process_fofa_channels(fofa_info):
-        nonlocal proxy, fofa_urls_len, open_driver
+        nonlocal proxy, fofa_urls_len, open_driver, open_sort
        fofa_url = fofa_info[0] if multicast else fofa_info
        results = defaultdict(lambda: defaultdict(list))
        try:
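For reference, `open_sort` is read the same way as the existing `open_proxy` and `open_driver` toggles. A minimal standalone sketch of that `configparser` read, assuming an INI file with a `[Settings]` section (the file path and fallback value are assumptions, not the project's exact setup):

import configparser

# Hypothetical standalone reproduction of the config read used above;
# "config/config.ini" is an assumed path.
config = configparser.ConfigParser()
config.read("config/config.ini")

# getboolean() parses "true"/"false", "yes"/"no", "1"/"0" into a bool;
# fallback avoids an exception when the option is missing.
open_sort = config.getboolean("Settings", "open_sort", fallback=True)
print(open_sort)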
@@ -88,7 +89,8 @@ async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
        else:
            with ThreadPoolExecutor(max_workers=100) as executor:
                futures = [
-                    executor.submit(process_fofa_json_url, url) for url in urls
+                    executor.submit(process_fofa_json_url, url, open_sort)
+                    for url in urls
                ]
                for future in futures:
                    results = merge_objects(results, future.result())
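The only behavioural change here is that each worker call now receives the `open_sort` flag. A self-contained sketch of this submit-and-merge pattern, with stub worker and merge functions standing in for `process_fofa_json_url` and `merge_objects` (the stubs and URLs below are invented for illustration):

from collections import defaultdict
from concurrent.futures import ThreadPoolExecutor

def process_json_url(url, open_sort):
    # Stub worker: return one channel list per source URL, tagging the
    # stream with a $cache:<key> marker only when sorting is enabled.
    suffix = f"$cache:{url}" if open_sort else ""
    return {"CCTV-1": [(f"{url}/stream.m3u8{suffix}", None, None)]}

def merge(acc, part):
    # Stub merge: append each partial result into the accumulator.
    for name, urls in part.items():
        acc[name].extend(urls)
    return acc

source_urls = ["http://example-a:8080", "http://example-b:8080"]  # placeholders
results = defaultdict(list)
with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [
        executor.submit(process_json_url, url, True)  # open_sort forwarded per call
        for url in source_urls
    ]
    for future in futures:
        results = merge(results, future.result())
print(dict(results))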
@@ -120,7 +122,7 @@ async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
     return fofa_results


-def process_fofa_json_url(url):
+def process_fofa_json_url(url, open_sort):
    """
    Process the FOFA json url
    """
@@ -141,7 +143,11 @@ def process_fofa_json_url(url):
                            item_name = format_channel_name(item.get("name"))
                            item_url = item.get("url").strip()
                            if item_name and item_url:
-                                total_url = f"{url}{item_url}$cache:{url}"
+                                total_url = (
+                                    f"{url}{item_url}$cache:{url}"
+                                    if open_sort
+                                    else f"{url}{item_url}"
+                                )
                                if item_name not in channels:
                                    channels[item_name] = [(total_url, None, None)]
                                else:
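The pattern the commit repeats is: only decorate the playable URL with a `$cache:<key>` marker when speed sorting is enabled, since the marker is only consumed by the speed test. A hedged sketch of that pattern as a tiny helper (the helper name is invented for illustration):

def build_total_url(base_url: str, item_url: str, open_sort: bool) -> str:
    # With sorting enabled, tag the URL so the speed test can cache results
    # per source; otherwise return the plain playable URL.
    if open_sort:
        return f"{base_url}{item_url}$cache:{base_url}"
    return f"{base_url}{item_url}"

print(build_total_url("http://1.2.3.4:8080", "/hls/1/index.m3u8", True))
print(build_total_url("http://1.2.3.4:8080", "/hls/1/index.m3u8", False))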
@@ -178,8 +178,9 @@ async def get_channels_by_hotel(callback=None):
            for result in search_region_result.values()
            for url, _, _ in result
        ]
+        open_sort = config.getboolean("Settings", "open_sort")
        channels = await get_channels_by_subscribe_urls(
-            urls, hotel=True, retry=False, error_print=False, with_cache=True
+            urls, hotel=True, retry=False, error_print=False, with_cache=open_sort
        )
        if not open_driver:
            close_session()
@@ -234,10 +234,15 @@ def get_channel_multicast_result(result, search_result):
    Get the channel multicast info result by result and search result
    """
    info_result = {}
+    open_sort = config.getboolean("Settings", "open_sort")
    for name, result_obj in result.items():
        info_list = [
            (
-                f"http://{url}/rtp/{ip}$cache:{result_region}_{result_type}",
+                (
+                    f"http://{url}/rtp/{ip}$cache:{result_region}_{result_type}"
+                    if open_sort
+                    else f"http://{url}/rtp/{ip}"
+                ),
                date,
                resolution,
            )
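In the multicast case the cache key is the `{result_region}_{result_type}` pair rather than a source URL, so every RTP address found for the same region and operator type shares one cached speed result. A small illustrative expansion of that expression (all values below are invented):

open_sort = True                          # assumed value of the Settings toggle
url = "192.168.1.1:4022"                  # hypothetical udpxy-style proxy host:port
ip = "239.3.1.241:8000"                   # hypothetical multicast group
result_region, result_type = "Beijing", "unicom"  # invented region/type pair

total_url = (
    f"http://{url}/rtp/{ip}$cache:{result_region}_{result_type}"
    if open_sort
    else f"http://{url}/rtp/{ip}"
)
print(total_url)
# -> http://192.168.1.1:4022/rtp/239.3.1.241:8000$cache:Beijing_unicom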
@@ -127,10 +127,10 @@ async def get_speed_by_info(url_info, ffmpeg, semaphore, callback=None):
            url, cache_info = url.split("$", 1)
            if "cache:" in cache_info:
                cache_key = cache_info.replace("cache:", "")
-                if cache_key in speed_cache:
-                    return tuple(url_info), speed_cache[cache_key]
        url = quote(url, safe=":/?&=$[]")
        url_info[0] = url
+        if cache_key in speed_cache:
+            return (tuple(url_info), speed_cache[cache_key])
        try:
            if ".m3u8" not in url and ffmpeg:
                speed = await check_stream_speed(url_info)
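On the consuming side, the `$cache:` marker is stripped off before the URL is tested, and the cache lookup now happens after the URL has been re-quoted and written back into `url_info`. A minimal synchronous sketch of that lookup path, assuming `speed_cache` is a plain dict keyed by the cache string (names outside the diff are simplifications, not the project's real API):

from urllib.parse import quote

speed_cache = {"http://example-source:8080": 2.5}  # invented cached speed value

def lookup_cached_speed(url_info, speed_cache):
    # url_info mirrors the mutable [url, date, resolution] entries used above.
    url = url_info[0]
    cache_key = None
    if "$" in url:
        url, cache_info = url.split("$", 1)
        if "cache:" in cache_info:
            cache_key = cache_info.replace("cache:", "")
    url = quote(url, safe=":/?&=$[]")
    url_info[0] = url
    if cache_key is not None and cache_key in speed_cache:
        return tuple(url_info), speed_cache[cache_key]
    return None  # not cached yet: the real code would run the speed test here

print(lookup_cached_speed(
    ["http://example-source:8080/live.m3u8$cache:http://example-source:8080", None, None],
    speed_cache,
))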