feat: fofa multicast
commit c6fb90d126 (parent daa9fd15de)
@@ -19,10 +19,12 @@ url_keywords_blacklist =
open_subscribe = False
subscribe_urls = https://m3u.ibert.me/txt/fmml_dv6.txt,https://m3u.ibert.me/txt/o_cn.txt,https://m3u.ibert.me/txt/j_iptv.txt,https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt
open_multicast = True
open_multicast_tonkiang = True
open_multicast_fofa = True
multicast_region_list = 全部
multicast_page_num = 3
open_proxy = False
open_driver = False
open_driver = True
open_hotel = True
open_hotel_tonkiang = False
open_hotel_fofa = True
@@ -1,31 +1,33 @@
| 配置项 | 默认值 | 描述 |
| --- | --- | --- |
| open_update | True | 开启更新,若关闭则只运行结果页面服务 |
| open_use_old_result | True | 开启使用历史更新结果,合并至本次更新中 |
| open_driver | True | 开启浏览器运行,若更新无数据可开启此模式,较消耗性能 |
| open_proxy | False | 开启代理,自动获取免费可用代理,若更新无数据可开启此模式 |
| source_file | config/demo.txt | 模板文件路径 |
| final_file | output/result.txt | 生成结果文件路径 |
| open_online_search | False | 开启线上检索源功能 |
| online_search_page_num | 3 | 在线检索频道获取分页数量 |
| urls_limit | 15 | 单个频道接口数量 |
| open_keep_all | False | 保留所有检索结果,会保留非模板频道名称的结果,推荐手动维护时开启 |
| open_sort | True | 开启排序功能(响应速度、日期、分辨率) |
| open_ffmpeg | True | 开启使用 FFmpeg 进行测速,获取更准确的速度与分辨率信息,需要提前手动安装 |
| open_m3u_result | True | 开启转换生成 m3u 文件类型结果链接,支持显示频道图标 |
| response_time_weight | 0.5 | 响应时间权重值(所有权重值总和应为 1) |
| resolution_weight | 0.5 | 分辨率权重值 (所有权重值总和应为 1) |
| recent_days | 30 | 获取最近时间范围内更新的接口(单位天),适当减小可避免出现匹配问题 |
| ipv_type | ipv4 | 生成结果中接口的类型,可选值:ipv4、ipv6、全部 |
| domain_blacklist | epg.pw | 接口域名黑名单,用于过滤低质量含广告类域名的接口 |
| url_keywords_blacklist | | 接口关键字黑名单,用于过滤含特定字符的接口 |
| open_subscribe | False | 开启订阅源功能 |
| subscribe_urls | https://m3u.ibert.me/txt/fmml_dv6.txt,<br>https://m3u.ibert.me/txt/o_cn.txt,<br>https://m3u.ibert.me/txt/j_iptv.txt,<br>https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt | 订阅源列表 |
| open_multicast | False | 开启组播源功能 |
| multicast_region_list | 广东 | 组播源地区列表,[更多地区](../updates/multicast/multicast_map.json),"全部"表示所有地区 |
| multicast_page_num | 3 | 组播地区获取分页数量 |
| open_hotel | True | 开启酒店源功能 |
| open_hotel_tonkiang | False | 开启 Tonkiang 酒店源工作模式 |
| open_hotel_fofa | True | 开启 FOFA、ZoomEye 酒店源工作模式 |
| hotel_region_list | 全部 | 酒店源地区列表,[更多地区](../updates/fofa/fofa_map.py),"全部"表示所有地区 |
| hotel_page_num | 3 | 酒店地区获取分页数量 |

| 配置项 | 默认值 | 描述 |
| --- | --- | --- |
| open_update | True | 开启更新,若关闭则只运行结果页面服务 |
| open_use_old_result | True | 开启使用历史更新结果,合并至本次更新中 |
| open_driver | True | 开启浏览器运行,若更新无数据可开启此模式,较消耗性能 |
| open_proxy | False | 开启代理,自动获取免费可用代理,若更新无数据可开启此模式 |
| source_file | config/demo.txt | 模板文件路径 |
| final_file | output/result.txt | 生成结果文件路径 |
| open_online_search | False | 开启线上检索源功能 |
| online_search_page_num | 3 | 在线检索频道获取分页数量 |
| urls_limit | 15 | 单个频道接口数量 |
| open_keep_all | False | 保留所有检索结果,会保留非模板频道名称的结果,推荐手动维护时开启 |
| open_sort | True | 开启排序功能(响应速度、日期、分辨率) |
| open_ffmpeg | True | 开启使用 FFmpeg 进行测速,获取更准确的速度与分辨率信息,需要提前手动安装 |
| open_m3u_result | True | 开启转换生成 m3u 文件类型结果链接,支持显示频道图标 |
| response_time_weight | 0.5 | 响应时间权重值(所有权重值总和应为 1) |
| resolution_weight | 0.5 | 分辨率权重值 (所有权重值总和应为 1) |
| recent_days | 30 | 获取最近时间范围内更新的接口(单位天),适当减小可避免出现匹配问题 |
| ipv_type | ipv4 | 生成结果中接口的类型,可选值:ipv4、ipv6、全部 |
| domain_blacklist | epg.pw | 接口域名黑名单,用于过滤低质量含广告类域名的接口 |
| url_keywords_blacklist | | 接口关键字黑名单,用于过滤含特定字符的接口 |
| open_subscribe | False | 开启订阅源功能 |
| subscribe_urls | https://m3u.ibert.me/txt/fmml_dv6.txt,<br>https://m3u.ibert.me/txt/o_cn.txt,<br>https://m3u.ibert.me/txt/j_iptv.txt,<br>https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt | 订阅源列表 |
| open_multicast | True | 开启组播源功能 |
| open_multicast_tonkiang | True | 开启 Tonkiang 组播源工作模式 |
| open_multicast_fofa | True | 开启 FOFA 组播源工作模式 |
| multicast_region_list | 全部 | 组播源地区列表,[更多地区](../updates/multicast/multicast_map.json),"全部"表示所有地区 |
| multicast_page_num | 3 | 组播地区获取分页数量 |
| open_hotel | True | 开启酒店源功能 |
| open_hotel_tonkiang | False | 开启 Tonkiang 酒店源工作模式 |
| open_hotel_fofa | True | 开启 FOFA、ZoomEye 酒店源工作模式 |
| hotel_region_list | 全部 | 酒店源地区列表,[更多地区](../updates/fofa/fofa_map.py),"全部"表示所有地区 |
| hotel_page_num | 3 | 酒店地区获取分页数量 |
@@ -1,31 +1,33 @@
| Configuration Item | Default Value | Description |
| --- | --- | --- |
| open_update | True | Enable updates; if disabled, only the result page service runs |
| open_use_old_result | True | Enable the use of historical update results and merge them into the current update |
| open_driver | True | Enable browser execution; if the update returns no data, this mode can be enabled, though it consumes more resources |
| open_proxy | False | Enable proxy; automatically obtains free, usable proxies; if the update returns no data, this mode can be enabled |
| source_file | config/demo.txt | Template file path |
| final_file | output/result.txt | Generated result file path |
| open_online_search | False | Enable online search source feature |
| online_search_page_num | 3 | Number of pages to retrieve for online channel search |
| urls_limit | 15 | Number of interfaces per channel |
| open_keep_all | False | Retain all search results, including results for non-template channel names; recommended for manual maintenance |
| open_sort | True | Enable the sorting function (response speed, date, resolution) |
| open_ffmpeg | True | Enable speed testing with FFmpeg to obtain more accurate speed and resolution information; manual installation is required in advance |
| open_m3u_result | True | Enable conversion of the result into an m3u file link, supporting the display of channel icons |
| response_time_weight | 0.5 | Response time weight value (the sum of all weight values should be 1) |
| resolution_weight | 0.5 | Resolution weight value (the sum of all weight values should be 1) |
| recent_days | 30 | Retrieve interfaces updated within the recent time range (in days); reducing it appropriately can avoid matching issues |
| ipv_type | ipv4 | The type of interface in the generated result, optional values: ipv4, ipv6, all |
| domain_blacklist | epg.pw | Interface domain blacklist, used to filter out interfaces with low-quality, ad-inclusive domains |
| url_keywords_blacklist | | Interface keyword blacklist, used to filter out interfaces containing specific characters |
| open_subscribe | False | Enable subscription source feature |
| subscribe_urls | https://m3u.ibert.me/txt/fmml_dv6.txt,<br>https://m3u.ibert.me/txt/o_cn.txt,<br>https://m3u.ibert.me/txt/j_iptv.txt,<br>https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt | Subscription source list |
| open_multicast | False | Enable multicast source feature |
| multicast_region_list | 广东 | Multicast source region list, [more regions](../updates/multicast/multicast_map.json); "全部" (all) means all regions |
| multicast_page_num | 3 | Number of pages to retrieve for multicast regions |
| open_hotel | True | Enable hotel source feature |
| open_hotel_tonkiang | False | Enable Tonkiang hotel source work mode |
| open_hotel_fofa | True | Enable FOFA/ZoomEye hotel source work mode |
| hotel_region_list | 全部 | List of hotel source regions, [more regions](../updates/fofa/fofa_map.py); "全部" (all) means all regions |
| hotel_page_num | 3 | Number of pages to retrieve for hotel regions |

| Configuration Item | Default Value | Description |
| --- | --- | --- |
| open_update | True | Enable updates; if disabled, only the result page service runs |
| open_use_old_result | True | Enable the use of historical update results and merge them into the current update |
| open_driver | True | Enable browser execution; if the update returns no data, this mode can be enabled, though it consumes more resources |
| open_proxy | False | Enable proxy; automatically obtains free, usable proxies; if the update returns no data, this mode can be enabled |
| source_file | config/demo.txt | Template file path |
| final_file | output/result.txt | Generated result file path |
| open_online_search | False | Enable online search source feature |
| online_search_page_num | 3 | Number of pages to retrieve for online channel search |
| urls_limit | 15 | Number of interfaces per channel |
| open_keep_all | False | Retain all search results, including results for non-template channel names; recommended for manual maintenance |
| open_sort | True | Enable the sorting function (response speed, date, resolution) |
| open_ffmpeg | True | Enable speed testing with FFmpeg to obtain more accurate speed and resolution information; manual installation is required in advance |
| open_m3u_result | True | Enable conversion of the result into an m3u file link, supporting the display of channel icons |
| response_time_weight | 0.5 | Response time weight value (the sum of all weight values should be 1) |
| resolution_weight | 0.5 | Resolution weight value (the sum of all weight values should be 1) |
| recent_days | 30 | Retrieve interfaces updated within the recent time range (in days); reducing it appropriately can avoid matching issues |
| ipv_type | ipv4 | The type of interface in the generated result, optional values: ipv4, ipv6, all |
| domain_blacklist | epg.pw | Interface domain blacklist, used to filter out interfaces with low-quality, ad-inclusive domains |
| url_keywords_blacklist | | Interface keyword blacklist, used to filter out interfaces containing specific characters |
| open_subscribe | False | Enable subscription source feature |
| subscribe_urls | https://m3u.ibert.me/txt/fmml_dv6.txt,<br>https://m3u.ibert.me/txt/o_cn.txt,<br>https://m3u.ibert.me/txt/j_iptv.txt,<br>https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt | Subscription source list |
| open_multicast | True | Enable multicast source feature |
| open_multicast_tonkiang | True | Enable Tonkiang multicast source work mode |
| open_multicast_fofa | True | Enable FOFA multicast source work mode |
| multicast_region_list | all | Multicast source region list, [more regions](../updates/multicast/multicast_map.json); "all" means all regions |
| multicast_page_num | 3 | Number of pages to retrieve for multicast regions |
| open_hotel | True | Enable hotel source feature |
| open_hotel_tonkiang | False | Enable Tonkiang hotel source work mode |
| open_hotel_fofa | True | Enable FOFA/ZoomEye hotel source work mode |
| hotel_region_list | all | List of hotel source regions, [more regions](../updates/fofa/fofa_map.py); "all" means all regions |
| hotel_page_num | 3 | Number of pages to retrieve for hotel regions |
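Before the code changes below, a minimal sketch of how these switches are consumed. It assumes the project's INI-style configuration with a [Settings] section (the file path here is illustrative); the lookups mirror the config.getboolean / config.getint / config.get calls visible in the diffs that follow.

```python
# Hedged sketch: reading the new multicast switches the same way the changed
# code does (ConfigParser lookups on a "Settings" section).
from configparser import ConfigParser

config = ConfigParser()
config.read("config/config.ini", encoding="utf-8")  # illustrative path

open_multicast = config.getboolean("Settings", "open_multicast")
open_multicast_tonkiang = config.getboolean("Settings", "open_multicast_tonkiang")
open_multicast_fofa = config.getboolean("Settings", "open_multicast_fofa")
multicast_page_num = config.getint("Settings", "multicast_page_num")
multicast_region_list = config.get("Settings", "multicast_region_list").split(",")

print(open_multicast, open_multicast_tonkiang, open_multicast_fofa)
print(multicast_page_num, multicast_region_list)
```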
main.py
@@ -95,10 +95,10 @@ class UpdateSource:
task_func(subscribe_urls, callback=self.update_progress)
)
elif setting == "open_hotel_tonkiang" or setting == "open_hotel_fofa":
task = asyncio.create_task(task_func(self.update_progress))
task = asyncio.create_task(task_func(callback=self.update_progress))
else:
task = asyncio.create_task(
task_func(channel_names, self.update_progress)
task_func(channel_names, callback=self.update_progress)
)
self.tasks.append(task)
setattr(self, result_attr, await task)
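The hunk above switches the progress callback from a positional to a keyword argument. A minimal sketch of why, using the new get_channels_by_fofa signature introduced later in this commit; update_progress is a stand-in for the real method:

```python
# Minimal sketch: with callback now the last of several defaulted parameters,
# passing it positionally would bind it to `urls`, so call sites must use
# callback=... explicitly.
import asyncio


async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
    # Trimmed stand-in for the real coroutine.
    if callback:
        callback("fetching fofa results", 0)
    return {}


def update_progress(message, percent):
    print(f"[{percent}%] {message}")


async def main():
    task = asyncio.create_task(get_channels_by_fofa(callback=update_progress))
    print(await task)


asyncio.run(main())
```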
@@ -155,48 +155,48 @@ class UpdateSource:
self.channel_data,
callback=sort_callback,
)
no_result_cate_names = [
(cate, name)
for cate, channel_obj in self.channel_data.items()
for name, info_list in channel_obj.items()
if len(info_list) < 3
]
no_result_names = [name for (_, name) in no_result_cate_names]
if no_result_names:
print(
f"Not enough url found for {', '.join(no_result_names)}, try a supplementary multicast search..."
)
sup_results = await get_channels_by_multicast(
no_result_names, self.update_progress
)
sup_channel_items = defaultdict(lambda: defaultdict(list))
for cate, name in no_result_cate_names:
data = sup_results.get(name)
if data:
sup_channel_items[cate][name] = data
self.total = len(
[
url
for obj in sup_channel_items.values()
for url_list in obj.values()
for url in url_list
]
)
if self.total > 0 and config.getboolean("Settings", "open_sort"):
self.update_progress(
f"正在对补充频道测速排序, 共{len([name for obj in sup_channel_items.values() for name in obj.keys()])}个频道, 含{self.total}个接口",
0,
)
self.start_time = time()
self.pbar = tqdm_asyncio(total=self.total, desc="Sorting")
sup_channel_items = await process_sort_channel_list(
sup_channel_items,
callback=sort_callback,
)
self.channel_data = merge_objects(
self.channel_data, sup_channel_items
)
self.total = self.get_urls_len()
# no_result_cate_names = [
# (cate, name)
# for cate, channel_obj in self.channel_data.items()
# for name, info_list in channel_obj.items()
# if len(info_list) < 3
# ]
# no_result_names = [name for (_, name) in no_result_cate_names]
# if no_result_names:
# print(
# f"Not enough url found for {', '.join(no_result_names)}, try a supplementary multicast search..."
# )
# sup_results = await get_channels_by_multicast(
# no_result_names, self.update_progress
# )
# sup_channel_items = defaultdict(lambda: defaultdict(list))
# for cate, name in no_result_cate_names:
# data = sup_results.get(name)
# if data:
# sup_channel_items[cate][name] = data
# self.total = len(
# [
# url
# for obj in sup_channel_items.values()
# for url_list in obj.values()
# for url in url_list
# ]
# )
# if self.total > 0 and config.getboolean("Settings", "open_sort"):
# self.update_progress(
# f"正在对补充频道测速排序, 共{len([name for obj in sup_channel_items.values() for name in obj.keys()])}个频道, 含{self.total}个接口",
# 0,
# )
# self.start_time = time()
# self.pbar = tqdm_asyncio(total=self.total, desc="Sorting")
# sup_channel_items = await process_sort_channel_list(
# sup_channel_items,
# callback=sort_callback,
# )
# self.channel_data = merge_objects(
# self.channel_data, sup_channel_items
# )
# self.total = self.get_urls_len()
self.pbar = tqdm(total=self.total, desc="Writing")
self.start_time = time()
write_channel_to_file(
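For context on the block this hunk comments out: it scanned self.channel_data (category → channel name → list of interface tuples) for channels with fewer than three interfaces and ran a supplementary multicast search for them. A standalone illustration of that filter, with invented sample data:

```python
# Illustration only: the (category, name) filter used by the now-disabled
# supplementary multicast search. Sample channel data is invented.
channel_data = {
    "央视": {
        "CCTV-1": [("http://example-a:8080/1", None, None)],
        "CCTV-5": [("http://example-b:8080/5", None, None)] * 5,
    },
}

no_result_cate_names = [
    (cate, name)
    for cate, channel_obj in channel_data.items()
    for name, info_list in channel_obj.items()
    if len(info_list) < 3
]
print(no_result_cate_names)  # [('央视', 'CCTV-1')]
```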
@@ -11,6 +11,7 @@ from utils.channel import format_channel_name
from utils.tools import merge_objects, get_pbar_remaining
from updates.proxy import get_proxy, get_proxy_next
from requests_custom.utils import get_source_requests, close_session
from collections import defaultdict

timeout = 10

@@ -22,7 +23,7 @@ def get_fofa_urls_from_region_list():
# region_list = config.get("Settings", "hotel_region_list").split(",")
urls = []
region_url = getattr(fofa_map, "region_url")
# if "all" in region_list or "全部" in region_list:
# if "all" in region_list or "ALL" in region_list or "全部" in region_list:
urls = [url for url_list in region_url.values() for url in url_list if url]
# else:
# for region in region_list:
@@ -31,25 +32,35 @@ def get_fofa_urls_from_region_list():
return urls


async def get_channels_by_fofa(callback):
async def get_channels_by_fofa(urls=None, multicast=False, callback=None):
"""
Get the channels by FOFA
"""
fofa_urls = get_fofa_urls_from_region_list()
fofa_urls = urls if urls else get_fofa_urls_from_region_list()
fofa_urls_len = len(fofa_urls)
pbar = tqdm_asyncio(total=fofa_urls_len, desc="Processing fofa")
pbar = tqdm_asyncio(
total=fofa_urls_len,
desc=f"Processing fofa {'for multicast' if multicast else 'for hotel'}",
)
start_time = time()
fofa_results = {}
callback(f"正在获取Fofa源更新, 共{fofa_urls_len}个地区", 0)
mode_name = "组播" if multicast else "酒店"
if callback:
callback(
f"正在获取Fofa{mode_name}源更新, 共{fofa_urls_len}个查询地址",
0,
)
proxy = None
open_proxy = config.getboolean("Settings", "open_proxy")
open_driver = config.getboolean("Settings", "open_driver")
if open_proxy:
proxy = await get_proxy(fofa_urls[0], best=True, with_test=True)
test_url = fofa_urls[0][0] if multicast else fofa_urls[0]
proxy = await get_proxy(test_url, best=True, with_test=True)

def process_fofa_channels(fofa_url):
def process_fofa_channels(fofa_info):
nonlocal proxy, fofa_urls_len, open_driver
results = {}
fofa_url = fofa_info[0] if multicast else fofa_info
results = defaultdict(lambda: defaultdict(list))
try:
if open_driver:
driver = setup_driver(proxy)
@@ -69,11 +80,18 @@ async def get_channels_by_fofa(callback):
)
fofa_source = re.sub(r"<!--.*?-->", "", page_source, flags=re.DOTALL)
urls = set(re.findall(r"https?://[\w\.-]+:\d+", fofa_source))

with ThreadPoolExecutor(max_workers=100) as executor:
futures = [executor.submit(process_fofa_json_url, url) for url in urls]
for future in futures:
results = merge_objects(results, future.result())
if multicast:
region = fofa_info[1]
type = fofa_info[2]
multicast_result = [(url, None, None) for url in urls]
results[region][type] = multicast_result
else:
with ThreadPoolExecutor(max_workers=100) as executor:
futures = [
executor.submit(process_fofa_json_url, url) for url in urls
]
for future in futures:
results = merge_objects(results, future.result())
except Exception as e:
print(e)
finally:
@@ -82,10 +100,11 @@ async def get_channels_by_fofa(callback):
driver.quit()
pbar.update()
remain = fofa_urls_len - pbar.n
callback(
f"正在获取Fofa源更新, 剩余{remain}个地区待获取, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / fofa_urls_len) * 100),
)
if callback:
callback(
f"正在获取Fofa{mode_name}源更新, 剩余{remain}个查询地址待获取, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / fofa_urls_len) * 100),
)
return results

max_workers = 3 if open_driver else 10
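A hedged usage sketch of the reworked entry point: hotel mode keeps the old behaviour (region URLs come from fofa_map.region_url), while multicast mode receives (search_url, region, type) tuples built by get_multicast_fofa_search_urls() further down in this commit. The progress function is a stand-in for UpdateSource.update_progress.

```python
# Sketch of how the two modes of get_channels_by_fofa are expected to be
# driven after this change; not part of the commit itself.
import asyncio

from updates.fofa import get_channels_by_fofa
from utils.channel import get_multicast_fofa_search_urls


def progress(message, percent):
    print(f"[{percent:3d}%] {message}")


async def demo():
    # Hotel mode: no urls passed, FOFA region URLs come from fofa_map.
    hotel_result = await get_channels_by_fofa(callback=progress)

    # Multicast mode: urls are (search_url, region, type) tuples.
    multicast_urls = get_multicast_fofa_search_urls()
    multicast_result = await get_channels_by_fofa(
        multicast_urls, multicast=True, callback=progress
    )
    return hotel_result, multicast_result


if __name__ == "__main__":
    asyncio.run(demo())
```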
@@ -25,7 +25,7 @@ from collections import defaultdict
import updates.fofa.fofa_map as fofa_map


async def get_channels_by_hotel(callback):
async def get_channels_by_hotel(callback=None):
"""
Get the channels by hotel
"""
@@ -36,7 +36,7 @@ async def get_channels_by_hotel(callback):
open_driver = config.getboolean("Settings", "open_driver")
page_num = config.getint("Settings", "hotel_page_num")
region_list = config.get("Settings", "hotel_region_list").split(",")
if "all" in region_list or "全部" in region_list:
if "all" in region_list or "ALL" in region_list or "全部" in region_list:
fofa_region_name_list = list(getattr(fofa_map, "region_url").keys())
region_list = fofa_region_name_list
if open_proxy:
@@ -51,7 +51,10 @@ async def get_channels_by_hotel(callback):
if open_driver:
driver = setup_driver(proxy)
try:
retry_func(lambda: driver.get(pageUrl), name=f"hotel search:{name}")
retry_func(
lambda: driver.get(pageUrl),
name=f"Tonkiang hotel search:{name}",
)
except Exception as e:
if open_proxy:
proxy = get_proxy_next()
@@ -67,7 +70,7 @@ async def get_channels_by_hotel(callback):
try:
page_soup = retry_func(
lambda: get_soup_requests(pageUrl, data=post_form, proxy=proxy),
name=f"hotel search:{name}",
name=f"Tonkiang hotel search:{name}",
)
except Exception as e:
if open_proxy:
@@ -141,15 +144,17 @@ async def get_channels_by_hotel(callback):
driver.close()
driver.quit()
pbar.update()
callback(
f"正在进行酒店源更新, 剩余{region_list_len - pbar.n}个地区待查询, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / region_list_len) * 100),
)
if callback:
callback(
f"正在进行Tonkiang酒店源更新, 剩余{region_list_len - pbar.n}个地区待查询, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / region_list_len) * 100),
)
return {"region": region, "type": type, "data": info_list}

region_list_len = len(region_list)
pbar = tqdm_asyncio(total=region_list_len, desc="Hotel search")
callback(f"正在进行酒店源更新, 共{region_list_len}个地区", 0)
pbar = tqdm_asyncio(total=region_list_len, desc="Tonkiang hotel search")
if callback:
callback(f"正在进行Tonkiang酒店源更新, 共{region_list_len}个地区", 0)
search_region_result = defaultdict(list)
with ThreadPoolExecutor(max_workers=3) as executor:
futures = {
@@ -174,7 +179,7 @@ async def get_channels_by_hotel(callback):
for url, _, _ in result
]
channels = await get_channels_by_subscribe_urls(
urls, retry=False, error_print=False, with_cache=True
urls, hotel=True, retry=False, error_print=False, with_cache=True
)
if not open_driver:
close_session()
@@ -1,15 +1,15 @@
from asyncio import create_task, gather
from utils.speed import get_speed
from utils.channel import (
get_results_from_multicast_soup,
get_results_from_multicast_soup_requests,
get_channel_multicast_name_region_type_result,
get_channel_multicast_region_type_list,
get_channel_multicast_result,
get_multicast_fofa_search_urls,
)
from utils.tools import get_pbar_remaining, get_soup
from utils.config import config, resource_path
from updates.proxy import get_proxy, get_proxy_next
from updates.fofa import get_channels_by_fofa
from time import time
from driver.setup import setup_driver
from driver.utils import search_submit
@@ -27,13 +27,15 @@ import json
from collections import defaultdict


async def get_channels_by_multicast(names, callback):
async def get_channels_by_multicast(names, callback=None):
"""
Get the channels by multicast
"""
channels = {}
pageUrl = "http://tonkiang.us/hoteliptv.php"
proxy = None
open_multicast_tonkiang = config.getboolean("Settings", "open_multicast_tonkiang")
open_multicast_fofa = config.getboolean("Settings", "open_multicast_fofa")
open_proxy = config.getboolean("Settings", "open_proxy")
open_driver = config.getboolean("Settings", "open_driver")
page_num = config.getint("Settings", "multicast_page_num")
@@ -50,6 +52,12 @@ async def get_channels_by_multicast(names, callback):
multicast_region_result, names
)
region_type_list = get_channel_multicast_region_type_list(name_region_type_result)
search_region_type_result = defaultdict(lambda: defaultdict(list))
if open_multicast_fofa:
fofa_search_urls = get_multicast_fofa_search_urls()
search_region_type_result = await get_channels_by_fofa(
fofa_search_urls, multicast=True
)

def process_channel_by_multicast(region, type):
nonlocal proxy, open_driver, page_num
@@ -144,41 +152,47 @@ async def get_channels_by_multicast(names, callback):
driver.close()
driver.quit()
pbar.update()
callback(
f"正在进行组播更新, 剩余{region_type_list_len - pbar.n}个地区组播源待查询, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / region_type_list_len) * 100),
)
if callback:
callback(
f"正在进行Tonkiang组播更新, 剩余{region_type_list_len - pbar.n}个地区待查询, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / region_type_list_len) * 100),
)
return {"region": region, "type": type, "data": info_list}

region_type_list_len = len(region_type_list)
pbar = tqdm_asyncio(total=region_type_list_len, desc="Multicast search")
callback(
f"正在进行组播更新, {len(names)}个频道, 共{region_type_list_len}个地区组播源", 0
)
search_region_type_result = defaultdict(lambda: defaultdict(list))
with ThreadPoolExecutor(max_workers=3) as executor:
futures = {
executor.submit(process_channel_by_multicast, region, type): (region, type)
for region, type in region_type_list
}
if open_multicast_tonkiang:
region_type_list_len = len(region_type_list)
pbar = tqdm_asyncio(total=region_type_list_len, desc="Multicast search")
if callback:
callback(
f"正在进行Tonkiang组播更新, {len(names)}个频道, 共{region_type_list_len}个地区",
0,
)
with ThreadPoolExecutor(max_workers=3) as executor:
futures = {
executor.submit(process_channel_by_multicast, region, type): (
region,
type,
)
for region, type in region_type_list
}

for future in as_completed(futures):
region, type = futures[future]
result = future.result()
data = result.get("data")
for future in as_completed(futures):
region, type = futures[future]
result = future.result()
data = result.get("data")

if data:
for item in data:
url = item.get("url")
date = item.get("date")
if url:
search_region_type_result[region][type].append(
(url, date, None)
)
if data:
for item in data:
url = item.get("url")
date = item.get("date")
if url:
search_region_type_result[region][type].append(
(url, date, None)
)
pbar.close()
channels = get_channel_multicast_result(
name_region_type_result, search_region_type_result
)
if not open_driver:
close_session()
pbar.close()
return channels
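For clarity, a small sketch of the shape both branches above feed into: search_region_type_result[region][type] collects (url, date, resolution) tuples, where the FOFA branch supplies url-only entries and the Tonkiang branch adds the date scraped from the result page. Sample values below are invented.

```python
# Illustration of the merged multicast result structure; sample values invented.
from collections import defaultdict

search_region_type_result = defaultdict(lambda: defaultdict(list))

# FOFA branch (get_channels_by_fofa with multicast=True): url only.
search_region_type_result["北京"]["联通"].append(("http://203.0.113.1:8080", None, None))

# Tonkiang branch: url plus the page date, resolution still unknown.
search_region_type_result["北京"]["联通"].append(("http://203.0.113.2:4022", "2024-06-01", None))

for region, types in search_region_type_result.items():
    for type_name, info_list in types.items():
        print(region, type_name, f"{len(info_list)} candidate urls")
```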
@@ -40,7 +40,7 @@ async def use_accessible_url(callback):
return baseUrl2


async def get_channels_by_online_search(names, callback):
async def get_channels_by_online_search(names, callback=None):
"""
Get the channels by online search
"""
@@ -183,15 +183,17 @@ async def get_channels_by_online_search(names, callback):
driver.close()
driver.quit()
pbar.update()
callback(
f"正在线上查询更新, 剩余{names_len - pbar.n}个频道待查询, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / names_len) * 100),
)
if callback:
callback(
f"正在线上查询更新, 剩余{names_len - pbar.n}个频道待查询, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / names_len) * 100),
)
return {"name": format_channel_name(name), "data": info_list}

names_len = len(names)
pbar = tqdm_asyncio(total=names_len, desc="Online search")
callback(f"正在线上查询更新, 共{names_len}个频道", 0)
if callback:
callback(f"正在线上查询更新, 共{names_len}个频道", 0)
with ThreadPoolExecutor(max_workers=3) as executor:
futures = [
executor.submit(process_channel_by_online_search, name) for name in names
@@ -13,7 +13,13 @@ timeout = 10


async def get_channels_by_subscribe_urls(
urls, multicast=False, retry=True, error_print=True, with_cache=False, callback=None
urls,
multicast=False,
hotel=False,
retry=True,
error_print=True,
with_cache=False,
callback=None,
):
"""
Get the channels by subscribe urls
@@ -26,10 +32,17 @@ async def get_channels_by_subscribe_urls(
if url.strip()
]
subscribe_urls_len = len(urls if urls else subscribe_urls)
pbar = tqdm_asyncio(total=subscribe_urls_len, desc="Processing subscribe")
pbar = tqdm_asyncio(
total=subscribe_urls_len,
desc=f"Processing subscribe {'for multicast' if multicast else ''}",
)
start_time = time()
mode_name = "组播" if multicast else "酒店" if hotel else "订阅"
if callback:
callback(f"正在获取订阅源更新, 共{subscribe_urls_len}个订阅源", 0)
callback(
f"正在获取{mode_name}源更新, 共{subscribe_urls_len}个{mode_name}源",
0,
)
session = Session()

def process_subscribe_channels(subscribe_info):
@@ -91,7 +104,7 @@ async def get_channels_by_subscribe_urls(
remain = subscribe_urls_len - pbar.n
if callback:
callback(
f"正在获取订阅源更新, 剩余{remain}个订阅源待获取, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
f"正在获取{mode_name}源更新, 剩余{remain}个{mode_name}源待获取, 预计剩余时间: {get_pbar_remaining(n=pbar.n, total=pbar.total, start_time=start_time)}",
int((pbar.n / subscribe_urls_len) * 100),
)
return channels
@@ -1,4 +1,4 @@
from utils.config import config, resource_path, save_config
from utils.config import config, resource_path
from utils.tools import check_url_by_patterns, get_total_urls_from_info_list
from utils.speed import sort_urls_by_speed_and_resolution, is_ffmpeg_installed
import os
@@ -9,6 +9,7 @@ import logging
from logging.handlers import RotatingFileHandler
from opencc import OpenCC
import asyncio
import base64

log_dir = "output"
log_file = "result_new.log"
@@ -209,6 +210,7 @@ def get_channel_multicast_region_type_list(result):
for region_type in result.values()
for region, types in region_type.items()
if "all" in config_region_list
or "ALL" in config_region_list
or "全部" in config_region_list
or region in config_region_list
for type in types
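A small illustration of the case-tolerant "all regions" check this hunk adds; sample lists are invented:

```python
# Illustration: "all", "ALL" or 全部 in the configured region list selects
# every region; otherwise only the listed regions pass the filter.
config_region_list = ["ALL"]
regions = ["北京", "广东", "上海"]

selected = [
    region
    for region in regions
    if "all" in config_region_list
    or "ALL" in config_region_list
    or "全部" in config_region_list
    or region in config_region_list
]
print(selected)  # ['北京', '广东', '上海']
```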
@@ -641,3 +643,48 @@ def write_channel_to_file(items, data, callback=None):
for handler in logging.root.handlers[:]:
handler.close()
logging.root.removeHandler(handler)


def get_multicast_fofa_search_org(region, type):
"""
Get the fofa search organization for multicast
"""
org = None
if region == "北京" and type == "联通":
org = "China Unicom Beijing Province Network"
elif type == "联通":
org = "CHINA UNICOM China169 Backbone"
elif type == "电信":
org = "Chinanet"
elif type == "移动":
org = "China Mobile communications corporation"
return org


def get_multicast_fofa_search_urls():
"""
Get the fofa search urls for multicast
"""
config_region_list = config.get("Settings", "multicast_region_list").split(",")
rtp_file_names = []
for filename in os.listdir(resource_path("updates/multicast/rtp")):
if filename.endswith(".txt") and "_" in filename:
filename = filename.replace(".txt", "")
rtp_file_names.append(filename)
region_type_list = [
(parts[0], parts[1])
for name in rtp_file_names
if (parts := name.split("_"))[0] in config_region_list
or "all" in config_region_list
or "ALL" in config_region_list
or "全部" in config_region_list
]
search_urls = []
for region, type in region_type_list:
search_url = "https://fofa.info/result?qbase64="
search_txt = f'"udpxy" && country="CN" && region="{region}" && org="{get_multicast_fofa_search_org(region,type)}"'
bytes_string = search_txt.encode("utf-8")
search_txt = base64.b64encode(bytes_string).decode("utf-8")
search_url += search_txt
search_urls.append((search_url, region, type))
return search_urls
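A standalone sketch of the query construction used by get_multicast_fofa_search_urls(): the FOFA search text is base64-encoded into the qbase64 query parameter. The region and org values below mirror the mapping in get_multicast_fofa_search_org above.

```python
# Worked example of building one FOFA multicast search URL, mirroring the
# functions above.
import base64

region, org = "北京", "China Unicom Beijing Province Network"
search_txt = f'"udpxy" && country="CN" && region="{region}" && org="{org}"'
qbase64 = base64.b64encode(search_txt.encode("utf-8")).decode("utf-8")
search_url = "https://fofa.info/result?qbase64=" + qbase64
print(search_url)
```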