feat: multicast

commit c0346bb26c
parent cbfd35fb32
@@ -15,7 +15,7 @@ favorite_list = [
     "湖南卫视",
     "翡翠台",
 ]
-open_online_search = True
+open_online_search = False
 favorite_page_num = 5
 default_page_num = 3
 urls_limit = 15
@@ -27,7 +27,7 @@ recent_days = 30
 ipv_type = "ipv4"
 domain_blacklist = ["epg.pw"]
 url_keywords_blacklist = []
-open_subscribe = True
+open_subscribe = False
 subscribe_urls = [
     "https://m3u.ibert.me/txt/fmml_dv6.txt",
     "https://m3u.ibert.me/txt/o_cn.txt",
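Note: this commit flips the online-search and subscribe sources off in the config. The new multicast path is gated by config.open_multicast, which main.py checks below. A minimal sketch of the toggles involved (open_multicast's default is not shown anywhere in this diff, so its value here is an assumption):

open_online_search = False  # changed in this commit
open_subscribe = False      # changed in this commit
open_multicast = True       # assumed value; the flag itself is read in main.py below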
@@ -72,7 +72,7 @@ async def get_channels_by_fofa(callback):
         with ThreadPoolExecutor(max_workers=100) as executor:
             futures = [executor.submit(process_fofa_json_url, url) for url in urls]
             for future in futures:
-                merge_objects(results, future.result())
+                results = merge_objects(results, future.result())
     except Exception as e:
         print(e)
     finally:
@@ -95,7 +95,7 @@ async def get_channels_by_fofa(callback):
             executor.submit(process_fofa_channels, fofa_url) for fofa_url in fofa_urls
         ]
         for future in futures:
-            merge_objects(fofa_results, future.result())
+            fofa_results = merge_objects(fofa_results, future.result())
     if not config.open_driver:
         close_session()
     pbar.close()
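Both fofa hunks fix the same bug: merge_objects was called but its return value was discarded. merge_objects itself is not part of this diff; the reassignment implies it returns the merged mapping rather than only mutating its first argument. A minimal sketch of the accumulation pattern under that assumption:

def merge_objects(target, source):
    # hypothetical shape: channel name -> list of (url, date, resolution) tuples
    merged = {name: list(infos) for name, infos in target.items()}
    for name, infos in (source or {}).items():
        merged.setdefault(name, []).extend(infos)
    return merged

results = {}
for partial in ({"CCTV1": [("http://a/1", None, None)]},
                {"CCTV1": [("http://b/1", None, "1920x1080")]}):
    results = merge_objects(results, partial)  # reassign, as in the fix above
# results == {"CCTV1": [("http://a/1", None, None), ("http://b/1", None, "1920x1080")]}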
main.py
@@ -14,6 +14,7 @@ from utils.tools import (
 )
 from subscribe import get_channels_by_subscribe_urls
 from fofa import get_channels_by_fofa
+from multicast import get_channels_by_multicast
 from online_search import get_channels_by_online_search
 import os
 from tqdm import tqdm
@@ -58,7 +59,7 @@ class UpdateSource:
             self.subscribe_result = await subscribe_task
         if config.open_multicast:
             multicast_task = asyncio.create_task(
-                get_channels_by_fofa(self.update_progress)
+                get_channels_by_multicast(channel_names, self.update_progress)
            )
             self.tasks.append(multicast_task)
             self.multicast_result = await multicast_task
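The multicast task previously pointed at get_channels_by_fofa by mistake; it now calls the multicast entry point and passes the channel names it needs. Condensed view of the gate-and-await pattern (class plumbing elided; names as in the diff):

if config.open_multicast:
    multicast_task = asyncio.create_task(
        get_channels_by_multicast(channel_names, self.update_progress)
    )
    self.tasks.append(multicast_task)  # tracked so pending tasks can be cancelled
    self.multicast_result = await multicast_task  # awaited right away, so sources run one after another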
(New file; given the `from multicast import get_channels_by_multicast` line in main.py above, this is presumably the multicast package's __init__.)
@@ -0,0 +1 @@
+from .request import get_channels_by_multicast
(The following hunks presumably belong to the multicast request module, per the `from .request import get_channels_by_multicast` re-export above.)
@@ -5,7 +5,12 @@ from utils.channel import (
     get_results_from_multicast_soup,
     get_results_from_multicast_soup_requests,
 )
-from utils.tools import check_url_by_patterns, get_pbar_remaining, get_soup
+from utils.tools import (
+    check_url_by_patterns,
+    get_pbar_remaining,
+    get_soup,
+    get_total_urls_from_info_list,
+)
 from utils.config import get_config
 from proxy import get_proxy, get_proxy_next
 from time import time, sleep
@@ -23,6 +28,7 @@ import urllib.parse as urlparse
 from urllib.parse import parse_qs
 import multicast_map
+from subscribe import get_channels_by_subscribe_urls
 import re

 config = get_config()
@@ -80,18 +86,33 @@ def get_multicast_urls_from_region_list():
     return urls


+def get_multicast_ip_list(urls):
+    """
+    Get the multicast ip from url
+    """
+    ip_list = []
+    for url in urls:
+        pattern = r"rtp://((\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(?::(\d+))?)"
+        matcher = re.search(pattern, url)
+        if matcher:
+            ip_list.append(matcher.group(1))
+    return ip_list
+
+
 async def get_channels_by_multicast(names, callback):
     """
     Get the channels by multicast
     """
-    multicast_urls = get_multicast_urls_from_region_list()
-    multicast_results = get_channels_by_subscribe_urls(
-        urls=multicast_urls, callback=callback
+    multicast_region_urls = get_multicast_urls_from_region_list()
+    multicast_results = await get_channels_by_subscribe_urls(
+        urls=multicast_region_urls, callback=callback
     )
     channels = {}
     # pageUrl = await use_accessible_url(callback)
     pageUrl = "http://tonkiang.us/hoteliptv.php"
-    if not pageUrl:
+    # if not pageUrl:
+    #     return channels
+    if not multicast_results:
         return channels
     proxy = None
     if config.open_proxy:
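Note that group(1) keeps the optional port, so each entry in ip_list is really an "ip[:port]" string; that is exactly the shape spliced into the relay URL later on. A quick check with made-up sample URLs:

import re

pattern = r"rtp://((\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(?::(\d+))?)"
for url in ("rtp://239.3.1.241:8000", "rtp://225.1.8.1", "http://example.com/live"):
    matcher = re.search(pattern, url)
    print(url, "->", matcher.group(1) if matcher else None)
# rtp://239.3.1.241:8000 -> 239.3.1.241:8000
# rtp://225.1.8.1 -> 225.1.8.1
# http://example.com/live -> None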
@@ -99,7 +120,15 @@ async def get_channels_by_multicast(names, callback):
     start_time = time()

     def process_channel_by_multicast(name):
         format_name = format_channel_name(name)
         info_list = []
+        multicast_info_list = multicast_results.get(format_name)
+        if not multicast_info_list:
+            return {"name": format_name, "data": info_list}
+        multicast_urls = get_total_urls_from_info_list(multicast_info_list)
+        multicast_ip_list = get_multicast_ip_list(multicast_urls)
+        if not multicast_ip_list:
+            return {"name": format_name, "data": info_list}
         nonlocal proxy
         try:
             if config.open_driver:
@@ -131,7 +160,7 @@ async def get_channels_by_multicast(names, callback):
                 page_soup = get_soup_requests(request_url, proxy=proxy)
                 if not page_soup:
                     print(f"{name}:Request fail.")
-                    return
+                    return {"name": format_name, "data": info_list}
                 else:
                     a_tags = page_soup.find_all("a", href=True)
                     for a_tag in a_tags:
@@ -182,11 +211,9 @@ async def get_channels_by_multicast(names, callback):
                     )
                     if soup:
                         results = (
-                            get_results_from_multicast_soup(soup, name)
+                            get_results_from_multicast_soup(soup)
                             if config.open_driver
-                            else get_results_from_multicast_soup_requests(
-                                soup, name
-                            )
+                            else get_results_from_multicast_soup_requests(soup)
                         )
                         print(name, "page:", page, "results num:", len(results))
                         if len(results) == 0:
@@ -219,7 +246,9 @@ async def get_channels_by_multicast(names, callback):
                             for result in results:
                                 url, date, resolution = result
                                 if url and check_url_by_patterns(url):
-                                    info_list.append((url, date, resolution))
+                                    for ip in multicast_ip_list:
+                                        total_url = f"http://{url}/rtp/{ip}"
+                                        info_list.append((total_url, date, resolution))
                             break
                         else:
                             print(
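This loop is the heart of the feature: every relay host scraped from the page is crossed with every multicast address recovered for the channel, so one page hit can yield several candidate streams. Illustrative values (both made up; the /rtp/ path suggests a udpxy-style relay, which is an assumption):

url = "203.0.113.10:4022"    # scraped relay host
ip = "239.3.1.241:8000"      # multicast group from get_multicast_ip_list
total_url = f"http://{url}/rtp/{ip}"
# -> "http://203.0.113.10:4022/rtp/239.3.1.241:8000"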
@@ -246,7 +275,7 @@ async def get_channels_by_multicast(names, callback):
                 f"正在进行组播更新, 剩余{names_len - pbar.n}个频道待查询, 预计剩余时间: {get_pbar_remaining(pbar, start_time)}",
                 int((pbar.n / names_len) * 100),
             )
-            return {"name": format_channel_name(name), "data": info_list}
+            return {"name": format_name, "data": info_list}

     names_len = len(names)
     pbar = tqdm_asyncio(total=names_len, desc="Multicast search")
@@ -42,14 +42,14 @@ async def get_channels_by_subscribe_urls(urls=None, callback=None):
                 for line in lines:
                     matcher = re.match(pattern, line)
                     if matcher is not None:
-                        key = matcher.group(1)
+                        key = matcher.group(1).strip()
                         resolution_match = re.search(r"_(\((.*?)\))", key)
                         resolution = (
                             resolution_match.group(2)
                             if resolution_match is not None
                             else None
                         )
-                        url = matcher.group(2)
+                        url = matcher.group(2).strip()
                         value = (url, None, resolution)
                         name = format_channel_name(key)
                         if name in channels:
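The two .strip() calls keep stray whitespace out of channel keys and URLs. The real `pattern` is defined earlier in the file and is not shown in this diff; the stand-in below only illustrates the key/url/resolution split on one subscribe line:

import re

line = " 湖南卫视_(1920x1080),http://example.com/stream.m3u8 \n"
matcher = re.match(r"^(.*?),(.*)$", line)          # stand-in for the real pattern
key = matcher.group(1).strip()                     # "湖南卫视_(1920x1080)"
url = matcher.group(2).strip()                     # no trailing whitespace or newline
resolution_match = re.search(r"_(\((.*?)\))", key)
resolution = resolution_match.group(2) if resolution_match is not None else None
print(key, url, resolution)  # 湖南卫视_(1920x1080) http://example.com/stream.m3u8 1920x1080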
@@ -74,7 +74,7 @@ async def get_channels_by_subscribe_urls(urls=None, callback=None):
             for subscribe_url in (urls if urls else config.subscribe_urls)
         ]
         for future in futures:
-            merge_objects(subscribe_results, future.result())
+            subscribe_results = merge_objects(subscribe_results, future.result())
         session.close()
         pbar.close()
         return subscribe_results
@@ -279,13 +279,13 @@ def get_channel_url(text):
     Get the url from text
     """
     url = None
-    urlRegex = r"(http[s]?://)?(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\\(\\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+(:[0-9]+)?"
+    urlRegex = r"((http|https)://)?((([0-9]{1,3}\.){3}[0-9]{1,3})|([a-zA-Z0-9-]+\.[a-zA-Z]{2,}))(:[0-9]+)?(/[a-zA-Z0-9-._~:/?#[\]@!$&'()*+,;=%]*)?"
     url_search = re.search(
         urlRegex,
         text,
     )
     if url_search:
-        url = url_search.group()
+        url = url_search.group().strip()
     return url
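The old regex was so permissive that almost any run of characters matched; the new one requires either an IPv4 host or a dotted domain before the optional port and path, and the added .strip() is belt-and-braces against edge whitespace. A quick comparison on made-up samples:

import re

urlRegex = r"((http|https)://)?((([0-9]{1,3}\.){3}[0-9]{1,3})|([a-zA-Z0-9-]+\.[a-zA-Z]{2,}))(:[0-9]+)?(/[a-zA-Z0-9-._~:/?#[\]@!$&'()*+,;=%]*)?"
for text in ("watch http://203.0.113.10:8080/hls/1.m3u8 now",
             "mirror at example.com/live",
             "no url here"):
    match = re.search(urlRegex, text)
    print(match.group().strip() if match else None)
# http://203.0.113.10:8080/hls/1.m3u8
# example.com/live
# None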
@@ -35,7 +35,9 @@ async def sort_urls_by_speed_and_resolution(infoList):
     Sort by speed and resolution
     """
     response_times = await gather(*(get_speed(url) for url, _, _ in infoList))
-    valid_responses = [(info, rt) for info, rt in zip(infoList, response_times)]
+    valid_responses = [
+        (info, rt) for info, rt in zip(infoList, response_times) if rt != float("inf")
+    ]

     def extract_resolution(resolution_str):
         numbers = re.findall(r"\d+x\d+", resolution_str)
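Previously every (info, response_time) pair was kept, including URLs whose probe failed; the filter assumes get_speed returns float("inf") for unreachable URLs and drops them before sorting. A tiny illustration:

infoList = [("http://a/1", None, "1920x1080"), ("http://b/1", None, None)]
response_times = [0.42, float("inf")]  # pretend the second URL timed out
valid_responses = [
    (info, rt) for info, rt in zip(infoList, response_times) if rt != float("inf")
]
print(valid_responses)  # [(('http://a/1', None, '1920x1080'), 0.42)]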