
Merge pull request from Guovin/dev

Dev
Govin 2024-12-05 15:42:02 +08:00 committed by GitHub
commit 5a9919f543
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 643 additions and 590 deletions

@@ -26,6 +26,7 @@ opencc-python-reimplemented = "*"
fake-useragent = "*"
pillow = "*"
yt-dlp = "*"
m3u8 = "*"
[packages]
requests = "*"
@@ -39,6 +40,7 @@ fake-useragent = "*"
gunicorn = "*"
pillow = "*"
yt-dlp = "*"
m3u8 = "*"
[requires]
python_version = "3.13"
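The m3u8 dependency added above is consumed by the new download probes in utils/speed.py further down. A minimal sketch of the library's playlist parsing, with a hypothetical stream URL:

import m3u8

# Load and parse a playlist; each segment exposes an absolute URI for download
playlist = m3u8.load("https://example.com/stream.m3u8")  # hypothetical URL
for segment in playlist.segments:
    print(segment.absolute_uri)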

1060
Pipfile.lock generated

File diff suppressed because it is too large

@@ -21,7 +21,7 @@ ipv_type_prefer = 自动
ipv4_num = 5
ipv6_num = 5
open_m3u_result = True
url_keywords_blacklist = epg.pw,skype.serv00.net,iptv.yjxfz.com,live-hls-web-ajb.getaj.net,live.goodiptv.club,hc73k3dhwo5gfkt.wcetv.com,stream1.freetv.fun,zw9999.cnstream.top
url_keywords_blacklist = epg.pw,skype.serv00.net,iptv.yjxfz.com,live-hls-web-ajb.getaj.net,live.goodiptv.club,hc73k3dhwo5gfkt.wcetv.com,stream1.freetv.fun,zw9999.cnstream.top,zsntlqj.xicp.net
open_subscribe = True
subscribe_urls = https://live.zbds.top/tv/iptv6.txt,https://live.zbds.top/tv/iptv4.txt,https://live.fanmingming.com/tv/m3u/ipv6.m3u,https://ghp.ci/https://raw.githubusercontent.com/joevess/IPTV/main/home.m3u8,https://aktv.top/live.txt,http://175.178.251.183:6689/live.txt,https://ghp.ci/https://raw.githubusercontent.com/kimwang1978/collect-tv-txt/main/merged_output.txt,https://m3u.ibert.me/txt/fmml_dv6.txt,https://m3u.ibert.me/txt/o_cn.txt,https://m3u.ibert.me/txt/j_iptv.txt,https://ghp.ci/https://raw.githubusercontent.com/xzw832/cmys/main/S_CCTV.txt,https://ghp.ci/https://raw.githubusercontent.com/xzw832/cmys/main/S_weishi.txt,http://itv.22m.top/ITVBox/tv/tvonline.txt,https://ghp.ci//https://raw.githubusercontent.com/asdjkl6/tv/tv/.m3u/整套直播源/测试/整套直播源/l.txt,https://ghp.ci//https://raw.githubusercontent.com/asdjkl6/tv/tv/.m3u/整套直播源/测试/整套直播源/kk.txt
open_multicast = True
@@ -30,7 +30,7 @@ open_multicast_fofa = True
multicast_region_list = 全部
multicast_page_num = 1
open_proxy = False
open_driver = True
open_driver = False
open_hotel = True
open_hotel_foodie = False
open_hotel_fofa = True

31
main.py

@@ -1,7 +1,17 @@
import asyncio
from utils.config import config
import copy
import pickle
from time import time
from tqdm import tqdm
import utils.constants as constants
from service.app import run_service
from updates.fofa import get_channels_by_fofa
from updates.hotel import get_channels_by_hotel
from updates.multicast import get_channels_by_multicast
from updates.online_search import get_channels_by_online_search
from updates.subscribe import get_channels_by_subscribe_urls
from utils.channel import (
get_channel_items,
append_total_data,
@@ -10,6 +20,7 @@ from utils.channel import (
get_channel_data_cache_with_compare,
format_channel_url_info,
)
from utils.config import config
from utils.tools import (
update_file,
get_pbar_remaining,
@@ -20,20 +31,12 @@ from utils.tools import (
check_ipv6_support,
resource_path,
)
from updates.subscribe import get_channels_by_subscribe_urls
from updates.multicast import get_channels_by_multicast
from updates.hotel import get_channels_by_hotel
from updates.fofa import get_channels_by_fofa
from updates.online_search import get_channels_by_online_search
from tqdm import tqdm
from time import time
import pickle
import copy
class UpdateSource:
def __init__(self):
self.update_progress = None
self.run_ui = False
self.tasks = []
self.channel_items = {}
@@ -62,7 +65,7 @@ class UpdateSource:
for setting, task_func, result_attr in tasks_config:
if (
setting == "hotel_foodie" or setting == "hotel_fofa"
setting == "hotel_foodie" or setting == "hotel_fofa"
) and config.open_hotel == False:
continue
if config.open_method[setting]:
@@ -160,8 +163,8 @@ class UpdateSource:
channel_data_cache, self.channel_data
)
with open(
resource_path(constants.cache_path, persistent=True),
"wb",
resource_path(constants.cache_path, persistent=True),
"wb",
) as file:
pickle.dump(channel_data_cache, file)
convert_to_m3u()
@@ -169,8 +172,8 @@ class UpdateSource:
print(
f"🥳 Update completed! Total time spent: {total_time}. Please check the {user_final_file} file!"
)
open_service = config.open_service
if self.run_ui:
open_service = config.open_service
service_tip = ", 可使用以下链接观看直播:" if open_service else ""
tip = (
f"✅ 服务启动成功{service_tip}"

Binary file not shown.

@@ -1,5 +1,22 @@
from utils.config import config
import asyncio
import base64
import copy
import datetime
import os
import pickle
import re
from collections import defaultdict
from logging import INFO
from bs4 import NavigableString
from opencc import OpenCC
import utils.constants as constants
from utils.config import config
from utils.speed import (
get_speed,
sort_urls_by_speed_and_resolution,
)
from utils.tools import (
check_url_by_patterns,
get_total_urls_from_info_list,
@@ -10,21 +27,6 @@ from utils.tools import (
write_content_into_txt,
get_logger,
)
from utils.speed import (
get_speed,
sort_urls_by_speed_and_resolution,
)
import os
from collections import defaultdict
import re
from bs4 import NavigableString
from opencc import OpenCC
import base64
import pickle
import copy
import datetime
import asyncio
from logging import INFO
def get_name_url(content, pattern, multiline=False, check_url=True):
@@ -207,9 +209,9 @@ def get_channel_multicast_region_type_list(result):
for region_type in result.values()
for region, types in region_type.items()
if "all" in region_list
or "ALL" in region_list
or "全部" in region_list
or region in region_list
or "ALL" in region_list
or "全部" in region_list
or region in region_list
for type in types
}
return list(region_type_list)
@@ -449,9 +451,9 @@ def append_data_to_info_data(info_data, cate, name, data, origin=None, check=Tru
if pure_url in urls:
continue
if (
url_origin == "important"
or (not check)
or (check and check_url_by_patterns(pure_url))
url_origin == "important"
or (not check)
or (check and check_url_by_patterns(pure_url))
):
info_data[cate][name].append((url, date, resolution, url_origin))
urls.append(pure_url)
@@ -480,14 +482,14 @@ def append_old_data_to_info_data(info_data, cate, name, data):
def append_total_data(
items,
names,
data,
hotel_fofa_result=None,
multicast_result=None,
hotel_foodie_result=None,
subscribe_result=None,
online_search_result=None,
items,
names,
data,
hotel_fofa_result=None,
multicast_result=None,
hotel_foodie_result=None,
subscribe_result=None,
online_search_result=None,
):
"""
Append all method data to total info data
@@ -547,7 +549,7 @@
async def process_sort_channel_list(data, ipv6=False, callback=None):
"""
Processs the sort channel list
Process the sort channel list
"""
ipv6_proxy = None if (not config.open_ipv6 or ipv6) else constants.ipv6_proxy
need_sort_data = copy.deepcopy(data)
@@ -640,7 +642,7 @@ def get_multicast_fofa_search_org(region, type):
elif type == "电信":
org = "Chinanet"
elif type == "移动":
org == "China Mobile communications corporation"
org = "China Mobile communications corporation"
return org
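The one-character change above fixes a classic Python slip: == evaluates a comparison and discards the result, so the 移动 (China Mobile) branch never actually set org. A standalone illustration of the difference:

org = None
org == "China Mobile communications corporation"  # comparison: result discarded, org stays None
assert org is None                                # the old bug
org = "China Mobile communications corporation"   # assignment: the fix
assert org == "China Mobile communications corporation"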
@@ -658,14 +660,14 @@ def get_multicast_fofa_search_urls():
(parts[0], parts[1])
for name in rtp_file_names
if (parts := name.partition("_"))[0] in region_list
or "all" in region_list
or "ALL" in region_list
or "全部" in region_list
or "all" in region_list
or "ALL" in region_list
or "全部" in region_list
]
search_urls = []
for region, type in region_type_list:
search_url = "https://fofa.info/result?qbase64="
search_txt = f'"udpxy" && country="CN" && region="{region}" && org="{get_multicast_fofa_search_org(region,type)}"'
search_txt = f'"udpxy" && country="CN" && region="{region}" && org="{get_multicast_fofa_search_org(region, type)}"'
bytes_string = search_txt.encode("utf-8")
search_txt = base64.b64encode(bytes_string).decode("utf-8")
search_url += search_txt
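For reference, fofa's qbase64 parameter is just the UTF-8 query string base64-encoded; a standalone sketch of the same steps, with example region/org values:

import base64

search_txt = '"udpxy" && country="CN" && region="Beijing" && org="Chinanet"'  # example values
qbase64 = base64.b64encode(search_txt.encode("utf-8")).decode("utf-8")
search_url = "https://fofa.info/result?qbase64=" + qbase64
print(search_url)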

@@ -2,7 +2,9 @@ import asyncio
import re
import subprocess
from time import time
from urllib.parse import quote
import m3u8
import yt_dlp
from aiohttp import ClientSession, TCPConnector
@@ -13,6 +15,62 @@ from utils.tools import is_ipv6, remove_cache_info, get_resolution_value, get_lo
logger = get_logger(constants.log_path)
async def get_speed_with_download(url, timeout=config.sort_timeout):
"""
Get the speed of the url with a total timeout
"""
start_time = time()
total_size = 0
total_time = 0
try:
async with ClientSession(
connector=TCPConnector(ssl=False), trust_env=True
) as session:
async with session.get(url, timeout=timeout) as response:
async for chunk in response.content.iter_any():
if chunk:
total_size += len(chunk)
except Exception as e:
pass
finally:
end_time = time()
total_time += end_time - start_time
average_speed = (total_size / total_time if total_time > 0 else 0) / 1024
return average_speed
async def get_speed_m3u8(url, timeout=config.sort_timeout):
"""
Get the speed of the m3u8 url with a total timeout
"""
start_time = time()
total_size = 0
total_time = 0
try:
url = quote(url, safe=':/?$&=@')
m3u8_obj = m3u8.load(url)
async with ClientSession(
connector=TCPConnector(ssl=False), trust_env=True
) as session:
for segment in m3u8_obj.segments:
if time() - start_time > timeout:
break
ts_url = segment.absolute_uri
async with session.get(ts_url, timeout=timeout) as response:
file_size = 0
async for chunk in response.content.iter_any():
if chunk:
file_size += len(chunk)
end_time = time()
download_time = end_time - start_time
total_size += file_size
total_time += download_time
except Exception as e:
pass
average_speed = (total_size / total_time if total_time > 0 else 0) / 1024
return average_speed
def get_info_yt_dlp(url, timeout=config.sort_timeout):
"""
Get the url info by yt_dlp
@@ -54,7 +112,7 @@ async def get_speed_requests(url, timeout=config.sort_timeout, proxy=None):
Get the speed of the url by requests
"""
async with ClientSession(
connector=TCPConnector(verify_ssl=False), trust_env=True
connector=TCPConnector(ssl=False), trust_env=True
) as session:
start = time()
end = None
@@ -171,6 +229,8 @@ async def get_speed(url, ipv6_proxy=None, callback=None):
return speed_cache[cache_key][0]
if ipv6_proxy and url_is_ipv6:
speed = (0, None)
# elif '.m3u8' in url:
# speed = await get_speed_m3u8(url)
else:
speed = await get_speed_yt_dlp(url)
if cache_key and cache_key not in speed_cache:
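Both new probes added in this file are coroutines that return an average speed in KB/s (total bytes over elapsed seconds, divided by 1024). A minimal driver, assuming the repo's utils.speed import path and a hypothetical test URL:

import asyncio

from utils.speed import get_speed_m3u8, get_speed_with_download  # assumes repo layout

async def main():
    url = "https://example.com/stream.m3u8"  # hypothetical URL
    print(f"m3u8 segments: {await get_speed_m3u8(url):.1f} KB/s")
    print(f"raw download:  {await get_speed_with_download(url):.1f} KB/s")

asyncio.run(main())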