refactor: config()

This commit is contained in:
guorong.zheng 2024-08-12 15:55:00 +08:00
parent f656074028
commit e46c025251
17 changed files with 320 additions and 272 deletions

@ -35,10 +35,12 @@ jobs:
run: | run: |
echo "OPEN_DRIVER=$(python -c ' echo "OPEN_DRIVER=$(python -c '
try: try:
import user_config as config from utils.config import get_config
except ImportError: config = get_config()
import config open_driver = config.getboolean("Settings", "open_driver")
print(config.open_driver)')" >> $GITHUB_ENV except:
open_driver = False
print(open_driver)')" >> $GITHUB_ENV
- name: Install Selenium - name: Install Selenium
if: env.OPEN_DRIVER == 'True' if: env.OPEN_DRIVER == 'True'
run: | run: |
@ -72,10 +74,12 @@ jobs:
git diff git diff
final_file=$(python -c ' final_file=$(python -c '
try: try:
import user_config as config from utils.config import get_config
except ImportError: config = get_config()
import config final_file = config.get("Settings", "final_file")
print(config.final_file)') except:
final_file = "output/result.txt"
print(final_file)')
if [[ -f "$final_file" ]]; then if [[ -f "$final_file" ]]; then
git add -f "$final_file" git add -f "$final_file"
fi fi

@ -1,41 +0,0 @@
# Default configuration constants for the IPTV source updater.
# NOTE(review): the original file assigned `open_use_old_result` twice
# (once at the top and again at the bottom); the redundant duplicate has
# been removed. Both assignments were True, so behavior is unchanged.

open_update = True  # master switch: run the update pipeline
open_use_old_result = True  # merge previously saved results into this run
source_file = "demo.txt"  # input channel template file
final_file = "result.txt"  # output file for the merged result
# Channels given preferential treatment (see favorite_page_num below).
favorite_list = [
    "广东珠江",
    "CCTV-1",
    "CCTV-5",
    "CCTV-5+",
    "CCTV-13",
    "广东体育",
    "广东卫视",
    "大湾区卫视",
    "浙江卫视",
    "湖南卫视",
    "翡翠台",
]
open_online_search = False  # enable the online-search source
favorite_page_num = 5  # pages fetched per favorite channel
default_page_num = 3  # pages fetched per non-favorite channel
urls_limit = 15  # maximum URLs kept per channel
open_keep_all = False  # keep all found channels, not only template ones
open_sort = True  # speed-test and sort candidate URLs
response_time_weight = 0.5  # sort weight: response time
resolution_weight = 0.5  # sort weight: stream resolution
recent_days = 30  # presumably: only keep sources updated within N days — TODO confirm
ipv_type = "ipv4"  # restrict results to this IP protocol family
domain_blacklist = ["epg.pw"]  # drop URLs from these domains
url_keywords_blacklist = []  # drop URLs containing these keywords
open_subscribe = False  # enable the subscription-URL source
subscribe_urls = [
    "https://m3u.ibert.me/txt/fmml_dv6.txt",
    "https://m3u.ibert.me/txt/o_cn.txt",
    "https://m3u.ibert.me/txt/j_iptv.txt",
    "https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt",
]
open_multicast = True  # enable the multicast (region-based) source
region_list = ["广东"]  # regions used for the multicast lookup
open_proxy = False  # route requests through a proxy
open_driver = False  # use a Selenium browser driver instead of plain requests

30
config/config.ini Normal file

@ -0,0 +1,30 @@
# Default settings for the IPTV source updater.
# Parsed with Python's configparser: booleans read via getboolean(),
# numbers via getint()/getfloat(); list-valued options are comma-separated
# strings that the application splits itself.
[Settings]
# Master switch: run the update pipeline.
open_update = True
# Merge previously saved results into this run.
open_use_old_result = True
# Input channel template file.
source_file = config/demo.txt
# Output file for the merged result.
final_file = output/result.txt
# Comma-separated channel names given preferential treatment
# (fetched with favorite_page_num pages instead of default_page_num).
favorite_list = 广东珠江,CCTV-1,CCTV-5,CCTV-5+,CCTV-13,广东体育,广东卫视,大湾区卫视,浙江卫视,湖南卫视,翡翠台
# Enable the online-search source.
open_online_search = False
# Pages fetched per favorite channel.
favorite_page_num = 5
# Pages fetched per non-favorite channel.
default_page_num = 3
# Maximum URLs kept per channel.
urls_limit = 15
# Keep all found channels, not only those in the template.
open_keep_all = False
# Speed-test and sort candidate URLs.
open_sort = True
# Sort weight: response time.
response_time_weight = 0.5
# Sort weight: stream resolution.
resolution_weight = 0.5
# Presumably: only keep sources updated within this many days — TODO confirm.
recent_days = 30
# Restrict results to this IP protocol family.
ipv_type = ipv4
# Comma-separated domains whose URLs are dropped.
domain_blacklist = epg.pw
# Comma-separated keywords; URLs containing any of them are dropped.
url_keywords_blacklist =
# Enable the subscription-URL source.
open_subscribe = False
# Comma-separated subscription source URLs.
subscribe_urls = https://m3u.ibert.me/txt/fmml_dv6.txt,https://m3u.ibert.me/txt/o_cn.txt,https://m3u.ibert.me/txt/j_iptv.txt,https://github.moeyy.xyz/https://raw.githubusercontent.com/PizazzGY/TVBox/main/live.txt
# Enable the multicast (region-based) source.
open_multicast = True
# Comma-separated regions used for the multicast lookup.
region_list = 广东
# Route requests through a proxy.
open_proxy = False
# Use a Selenium browser driver instead of plain requests.
open_driver = False

@ -1,7 +1,4 @@
from selenium import webdriver from selenium import webdriver
from utils.config import get_config
config = get_config()
def setup_driver(proxy=None): def setup_driver(proxy=None):

28
main.py

@ -30,7 +30,7 @@ app = Flask(__name__)
@app.route("/") @app.route("/")
def show_result(): def show_result():
user_final_file = getattr(config, "final_file", "result.txt") user_final_file = config.get("Settings", "final_file")
with open(user_final_file, "r", encoding="utf-8") as file: with open(user_final_file, "r", encoding="utf-8") as file:
content = file.read() content = file.read()
return render_template_string("<pre>{{ content }}</pre>", content=content) return render_template_string("<pre>{{ content }}</pre>", content=content)
@ -51,19 +51,19 @@ class UpdateSource:
self.start_time = None self.start_time = None
async def visit_page(self, channel_names=None): async def visit_page(self, channel_names=None):
if config.open_subscribe: if config.getboolean("Settings", "open_subscribe"):
subscribe_task = asyncio.create_task( subscribe_task = asyncio.create_task(
get_channels_by_subscribe_urls(callback=self.update_progress) get_channels_by_subscribe_urls(callback=self.update_progress)
) )
self.tasks.append(subscribe_task) self.tasks.append(subscribe_task)
self.subscribe_result = await subscribe_task self.subscribe_result = await subscribe_task
if config.open_multicast: if config.getboolean("Settings", "open_multicast"):
multicast_task = asyncio.create_task( multicast_task = asyncio.create_task(
get_channels_by_multicast(channel_names, self.update_progress) get_channels_by_multicast(channel_names, self.update_progress)
) )
self.tasks.append(multicast_task) self.tasks.append(multicast_task)
self.multicast_result = await multicast_task self.multicast_result = await multicast_task
if config.open_online_search: if config.getboolean("Settings", "open_online_search"):
online_search_task = asyncio.create_task( online_search_task = asyncio.create_task(
get_channels_by_online_search(channel_names, self.update_progress) get_channels_by_online_search(channel_names, self.update_progress)
) )
@ -94,7 +94,7 @@ class UpdateSource:
self.multicast_result, self.multicast_result,
self.online_search_result, self.online_search_result,
) )
if config.open_sort: if config.getboolean("Settings", "open_sort"):
is_ffmpeg = is_ffmpeg_installed() is_ffmpeg = is_ffmpeg_installed()
if not is_ffmpeg: if not is_ffmpeg:
print("FFmpeg is not installed, using requests for sorting.") print("FFmpeg is not installed, using requests for sorting.")
@ -137,15 +137,15 @@ class UpdateSource:
lambda: self.pbar_update("写入结果"), lambda: self.pbar_update("写入结果"),
) )
self.pbar.close() self.pbar.close()
user_final_file = getattr(config, "final_file", "result.txt") user_final_file = config.get("Settings", "final_file")
update_file(user_final_file, "result_new.txt") update_file(user_final_file, "output/result_new.txt")
if config.open_sort: if config.getboolean("Settings", "open_sort"):
user_log_file = ( user_log_file = "output/" + (
"user_result.log" "user_result.log"
if os.path.exists("user_config.py") if os.path.exists("config/user_config.ini")
else "result.log" else "result.log"
) )
update_file(user_log_file, "result_new.log") update_file(user_log_file, "output/result_new.log")
print(f"Update completed! Please check the {user_final_file} file!") print(f"Update completed! Please check the {user_final_file} file!")
if not os.environ.get("GITHUB_ACTIONS"): if not os.environ.get("GITHUB_ACTIONS"):
print(f"You can access the result at {get_ip_address()}") print(f"You can access the result at {get_ip_address()}")
@ -165,10 +165,10 @@ class UpdateSource:
self.update_progress = callback or default_callback self.update_progress = callback or default_callback
self.run_ui = True if callback else False self.run_ui = True if callback else False
if config.open_update: if config.getboolean("Settings", "open_update"):
await self.main() await self.main()
if self.run_ui: if self.run_ui:
if not config.open_update: if not config.getboolean("Settings", "open_update"):
print(f"You can access the result at {get_ip_address()}") print(f"You can access the result at {get_ip_address()}")
self.update_progress( self.update_progress(
f"服务启动成功, 可访问以下链接:", f"服务启动成功, 可访问以下链接:",
@ -187,7 +187,7 @@ class UpdateSource:
def scheduled_task(): def scheduled_task():
if config.open_update: if config.getboolean("Settings", "open_update"):
update_source = UpdateSource() update_source = UpdateSource()
loop = asyncio.new_event_loop() loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop) asyncio.set_event_loop(loop)

@ -2,7 +2,6 @@ import requests
import re import re
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
from fake_useragent import UserAgent from fake_useragent import UserAgent
from time import sleep
headers = { headers = {
"Accept": "*/*", "Accept": "*/*",

@ -3,20 +3,14 @@ from tkinter import messagebox
from tkinter import scrolledtext from tkinter import scrolledtext
from tkinter import ttk from tkinter import ttk
from tkinter import filedialog from tkinter import filedialog
from utils.config import resource_path, load_external_config from utils.config import get_config, resource_path
from main import UpdateSource from main import UpdateSource
import os import os
import asyncio import asyncio
import threading import threading
import webbrowser import webbrowser
config_path = resource_path("user_config.py") config = get_config()
default_config_path = resource_path("config.py")
config = (
load_external_config("user_config.py")
if os.path.exists(config_path)
else load_external_config("config.py")
)
class TkinterUI: class TkinterUI:
@ -56,14 +50,13 @@ class TkinterUI:
] ]
self.result_url = None self.result_url = None
def format_list(self, text):
return [f"{item.strip()}" for item in text.split(",") if item.strip()]
def update_open_update(self): def update_open_update(self):
config.open_update = self.open_update_var.get() config.set("Settings", "open_update", str(self.open_update_var.get()))
def update_open_use_old_result(self): def update_open_use_old_result(self):
config.open_use_old_result = self.open_use_old_result_var.get() config.set(
"Settings", "open_use_old_result", str(self.open_use_old_result_var.get())
)
def select_source_file(self): def select_source_file(self):
filepath = filedialog.askopenfilename( filepath = filedialog.askopenfilename(
@ -72,7 +65,7 @@ class TkinterUI:
if filepath: if filepath:
self.source_file_entry.delete(0, tk.END) self.source_file_entry.delete(0, tk.END)
self.source_file_entry.insert(0, filepath) self.source_file_entry.insert(0, filepath)
config.source_file = filepath config.set("Settings", "source_file", filepath)
def select_final_file(self): def select_final_file(self):
filepath = filedialog.askopenfilename( filepath = filedialog.askopenfilename(
@ -81,77 +74,95 @@ class TkinterUI:
if filepath: if filepath:
self.final_file_entry.delete(0, tk.END) self.final_file_entry.delete(0, tk.END)
self.final_file_entry.insert(0, filepath) self.final_file_entry.insert(0, filepath)
config.final_file = filepath config.set("Settings", "final_file", filepath)
def update_open_subscribe(self): def update_open_subscribe(self):
config.open_subscribe = self.open_subscribe_var.get() config.set("Settings", "open_subscribe", str(self.open_subscribe_var.get()))
def update_open_multicast(self): def update_open_multicast(self):
config.open_multicast = self.open_multicast_var.get() config.set("Settings", "open_multicast", str(self.open_multicast_var.get()))
def update_open_online_search(self): def update_open_online_search(self):
config.open_online_search = self.open_online_search_var.get() config.set(
"Settings", "open_online_search", str(self.open_online_search_var.get())
)
def update_open_driver(self): def update_open_driver(self):
config.open_driver = self.open_driver_var.get() config.set("Settings", "open_driver", str(self.open_driver_var.get()))
def update_open_proxy(self): def update_open_proxy(self):
config.open_proxy = self.open_proxy_var.get() config.set("Settings", "open_proxy", str(self.open_proxy_var.get()))
def update_open_keep_all(self): def update_open_keep_all(self):
config.open_keep_all = self.open_keep_all_var.get() config.set("Settings", "open_keep_all", str(self.open_keep_all_var.get()))
def update_open_sort(self): def update_open_sort(self):
config.open_sort = self.open_sort_var.get() config.set("Settings", "open_sort", str(self.open_sort_var.get()))
def update_favorite_list(self, event): def update_favorite_list(self, event):
config.favorite_list = self.format_list( config.set(
self.favorite_list_text.get(1.0, tk.END) "Settings",
"favorite_list",
self.favorite_list_text.get(1.0, tk.END),
) )
def update_favorite_page_num(self, event): def update_favorite_page_num(self, event):
config.favorite_page_num = self.favorite_page_num_entry.get() config.set("Settings", "favorite_page_num", self.favorite_page_num_entry.get())
def update_default_page_num(self, event): def update_default_page_num(self, event):
config.default_page_num = self.default_page_num_entry.get() config.set("Settings", "default_page_num", self.default_page_num_entry.get())
def update_urls_limit(self, event): def update_urls_limit(self, event):
config.urls_limit = self.urls_limit_entry.get() config.set("Settings", "urls_limit", self.urls_limit_entry.get())
def update_response_time_weight(self, event): def update_response_time_weight(self, event):
config.response_time_weight = self.response_time_weight_entry.get() config.set(
"Settings", "response_time_weight", self.response_time_weight_entry.get()
)
def update_resolution_weight(self, event): def update_resolution_weight(self, event):
config.resolution_weight = self.resolution_weight_entry.get() config.set("Settings", "resolution_weight", self.resolution_weight_entry.get())
def update_ipv_type(self, event): def update_ipv_type(self, event):
config.ipv_type = f'"{self.ipv_type_combo.get()}"' config.set("Settings", "ipv_type", self.ipv_type_combo.get())
def update_recent_days(self, event): def update_recent_days(self, event):
config.recent_days = self.recent_days_entry.get() config.set("Settings", "recent_days", self.recent_days_entry.get())
def update_url_keywords_blacklist(self, event): def update_url_keywords_blacklist(self, event):
config.url_keywords_blacklist = self.format_list( config.set(
self.url_keywords_blacklist_text.get(1.0, tk.END) "Settings",
"url_keywords_blacklist",
self.url_keywords_blacklist_text.get(1.0, tk.END),
) )
def update_domain_blacklist(self, event): def update_domain_blacklist(self, event):
config.domain_blacklist = self.format_list( config.set(
self.domain_blacklist_text.get(1.0, tk.END) "Settings",
"domain_blacklist",
self.domain_blacklist_text.get(1.0, tk.END),
) )
def update_url_keywords_blacklist(self, event): def update_url_keywords_blacklist(self, event):
config.url_keywords_blacklist = self.format_list( config.set(
self.url_keywords_blacklist_text.get(1.0, tk.END) "Settings",
"url_keywords_blacklist",
self.url_keywords_blacklist_text.get(1.0, tk.END),
) )
def update_subscribe_urls(self, event): def update_subscribe_urls(self, event):
config.subscribe_urls = self.format_list( config.set(
self.subscribe_urls_text.get(1.0, tk.END) "Settings",
"subscribe_urls",
self.subscribe_urls_text.get(1.0, tk.END),
) )
def update_region_list(self, event): def update_region_list(self, event):
config.region_list = self.format_list(self.region_list_text.get(1.0, tk.END)) config.set(
"Settings",
"region_list",
self.region_list_text.get(1.0, tk.END),
)
def view_result_link_callback(self, event): def view_result_link_callback(self, event):
webbrowser.open_new_tab(self.result_url) webbrowser.open_new_tab(self.result_url)
@ -160,9 +171,9 @@ class TkinterUI:
config_values = { config_values = {
"open_update": self.open_update_var.get(), "open_update": self.open_update_var.get(),
"open_use_old_result": self.open_use_old_result_var.get(), "open_use_old_result": self.open_use_old_result_var.get(),
"source_file": f'"{self.source_file_entry.get()}"', "source_file": self.source_file_entry.get(),
"final_file": f'"{self.final_file_entry.get()}"', "final_file": self.final_file_entry.get(),
"favorite_list": self.format_list(self.favorite_list_text.get(1.0, tk.END)), "favorite_list": self.favorite_list_text.get(1.0, tk.END),
"open_online_search": self.open_online_search_var.get(), "open_online_search": self.open_online_search_var.get(),
"favorite_page_num": self.favorite_page_num_entry.get(), "favorite_page_num": self.favorite_page_num_entry.get(),
"default_page_num": self.default_page_num_entry.get(), "default_page_num": self.default_page_num_entry.get(),
@ -174,31 +185,24 @@ class TkinterUI:
"response_time_weight": self.response_time_weight_entry.get(), "response_time_weight": self.response_time_weight_entry.get(),
"resolution_weight": self.resolution_weight_entry.get(), "resolution_weight": self.resolution_weight_entry.get(),
"recent_days": self.recent_days_entry.get(), "recent_days": self.recent_days_entry.get(),
"ipv_type": f'"{self.ipv_type_combo.get()}"', "ipv_type": self.ipv_type_combo.get(),
"domain_blacklist": self.format_list( "domain_blacklist": self.domain_blacklist_text.get(1.0, tk.END),
self.domain_blacklist_text.get(1.0, tk.END) "url_keywords_blacklist": self.url_keywords_blacklist_text.get(1.0, tk.END),
),
"url_keywords_blacklist": self.format_list(
self.url_keywords_blacklist_text.get(1.0, tk.END)
),
"open_subscribe": self.open_subscribe_var.get(), "open_subscribe": self.open_subscribe_var.get(),
"subscribe_urls": self.format_list( "subscribe_urls": self.subscribe_urls_text.get(1.0, tk.END),
self.subscribe_urls_text.get(1.0, tk.END)
),
"open_multicast": self.open_multicast_var.get(), "open_multicast": self.open_multicast_var.get(),
"region_list": self.format_list(self.region_list_text.get(1.0, tk.END)), "region_list": self.region_list_text.get(1.0, tk.END),
} }
for key, value in config_values.items(): for key, value in config_values.items():
setattr(config, key, value) config.set("Settings", key, str(value))
user_config_file = ( user_config_file = "config/" + (
"user_config.py" if os.path.exists("user_config.py") else "config.py" "user_config.ini" if os.path.exists("user_config.ini") else "config.ini"
) )
with open( with open(
resource_path(user_config_file, persistent=True), "w", encoding="utf-8" resource_path(user_config_file, persistent=True), "w", encoding="utf-8"
) as f: ) as configfile:
for key, value in config_values.items(): config.write(configfile)
f.write(f"{key} = {value}\n")
messagebox.showinfo("提示", "保存成功") messagebox.showinfo("提示", "保存成功")
async def run_update(self): async def run_update(self):
@ -274,7 +278,9 @@ class TkinterUI:
frame1_open_update_column1, text="开启更新:", width=8 frame1_open_update_column1, text="开启更新:", width=8
) )
self.open_update_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_update_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_update_var = tk.BooleanVar(value=config.open_update) self.open_update_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_update")
)
self.open_update_checkbutton = ttk.Checkbutton( self.open_update_checkbutton = ttk.Checkbutton(
frame1_open_update_column1, frame1_open_update_column1,
variable=self.open_update_var, variable=self.open_update_var,
@ -289,7 +295,9 @@ class TkinterUI:
frame1_open_update_column2, text="使用历史结果:", width=12 frame1_open_update_column2, text="使用历史结果:", width=12
) )
self.open_use_old_result_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_use_old_result_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_use_old_result_var = tk.BooleanVar(value=config.open_use_old_result) self.open_use_old_result_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_use_old_result")
)
self.open_use_old_result_checkbutton = ttk.Checkbutton( self.open_use_old_result_checkbutton = ttk.Checkbutton(
frame1_open_update_column2, frame1_open_update_column2,
variable=self.open_use_old_result_var, variable=self.open_use_old_result_var,
@ -307,7 +315,7 @@ class TkinterUI:
self.source_file_entry = tk.Entry(frame1_source_file) self.source_file_entry = tk.Entry(frame1_source_file)
self.source_file_label.pack(side=tk.LEFT, padx=4, pady=8) self.source_file_label.pack(side=tk.LEFT, padx=4, pady=8)
self.source_file_entry.pack(fill=tk.X, padx=4, expand=True) self.source_file_entry.pack(fill=tk.X, padx=4, expand=True)
self.source_file_entry.insert(0, config.source_file) self.source_file_entry.insert(0, config.get("Settings", "source_file"))
frame1_source_file_select = tk.Frame(frame1) frame1_source_file_select = tk.Frame(frame1)
frame1_source_file_select.pack(fill=tk.X) frame1_source_file_select.pack(fill=tk.X)
@ -324,7 +332,7 @@ class TkinterUI:
self.final_file_entry = tk.Entry(frame1_final_file) self.final_file_entry = tk.Entry(frame1_final_file)
self.final_file_label.pack(side=tk.LEFT, padx=4, pady=8) self.final_file_label.pack(side=tk.LEFT, padx=4, pady=8)
self.final_file_entry.pack(fill=tk.X, padx=4, expand=True) self.final_file_entry.pack(fill=tk.X, padx=4, expand=True)
self.final_file_entry.insert(0, config.final_file) self.final_file_entry.insert(0, config.get("Settings", "final_file"))
frame1_final_file_select = tk.Frame(frame1) frame1_final_file_select = tk.Frame(frame1)
frame1_final_file_select.pack(fill=tk.X) frame1_final_file_select.pack(fill=tk.X)
@ -345,7 +353,9 @@ class TkinterUI:
frame1_mode_params_column1, text="浏览器模式:", width=12 frame1_mode_params_column1, text="浏览器模式:", width=12
) )
self.open_driver_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_driver_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_driver_var = tk.BooleanVar(value=config.open_driver) self.open_driver_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_driver")
)
self.open_driver_checkbutton = ttk.Checkbutton( self.open_driver_checkbutton = ttk.Checkbutton(
frame1_mode_params_column1, frame1_mode_params_column1,
variable=self.open_driver_var, variable=self.open_driver_var,
@ -360,7 +370,9 @@ class TkinterUI:
frame1_mode_params_column2, text="开启代理:", width=12 frame1_mode_params_column2, text="开启代理:", width=12
) )
self.open_proxy_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_proxy_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_proxy_var = tk.BooleanVar(value=config.open_proxy) self.open_proxy_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_proxy")
)
self.open_proxy_checkbutton = ttk.Checkbutton( self.open_proxy_checkbutton = ttk.Checkbutton(
frame1_mode_params_column2, frame1_mode_params_column2,
variable=self.open_proxy_var, variable=self.open_proxy_var,
@ -384,7 +396,7 @@ class TkinterUI:
self.urls_limit_label.pack(side=tk.LEFT, padx=4, pady=8) self.urls_limit_label.pack(side=tk.LEFT, padx=4, pady=8)
self.urls_limit_entry = tk.Entry(frame1_channel_column1) self.urls_limit_entry = tk.Entry(frame1_channel_column1)
self.urls_limit_entry.pack(side=tk.LEFT, padx=4, pady=8) self.urls_limit_entry.pack(side=tk.LEFT, padx=4, pady=8)
self.urls_limit_entry.insert(15, config.urls_limit) self.urls_limit_entry.insert(15, config.getint("Settings", "urls_limit"))
self.urls_limit_entry.bind("<KeyRelease>", self.update_urls_limit) self.urls_limit_entry.bind("<KeyRelease>", self.update_urls_limit)
self.ipv_type_label = tk.Label( self.ipv_type_label = tk.Label(
@ -408,7 +420,9 @@ class TkinterUI:
frame1_sort_column1, text="保留模式:", width=12 frame1_sort_column1, text="保留模式:", width=12
) )
self.open_keep_all_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_keep_all_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_keep_all_var = tk.BooleanVar(value=config.open_keep_all) self.open_keep_all_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_keep_all")
)
self.open_keep_all_checkbutton = ttk.Checkbutton( self.open_keep_all_checkbutton = ttk.Checkbutton(
frame1_sort_column1, frame1_sort_column1,
variable=self.open_keep_all_var, variable=self.open_keep_all_var,
@ -423,7 +437,9 @@ class TkinterUI:
frame1_sort_column2, text="开启测速排序:", width=12 frame1_sort_column2, text="开启测速排序:", width=12
) )
self.open_sort_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_sort_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_sort_var = tk.BooleanVar(value=config.open_sort) self.open_sort_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_sort")
)
self.open_sort_checkbutton = ttk.Checkbutton( self.open_sort_checkbutton = ttk.Checkbutton(
frame1_sort_column2, frame1_sort_column2,
variable=self.open_sort_var, variable=self.open_sort_var,
@ -446,7 +462,9 @@ class TkinterUI:
self.response_time_weight_label.pack(side=tk.LEFT, padx=4, pady=8) self.response_time_weight_label.pack(side=tk.LEFT, padx=4, pady=8)
self.response_time_weight_entry = tk.Entry(frame1_sort_params_column1) self.response_time_weight_entry = tk.Entry(frame1_sort_params_column1)
self.response_time_weight_entry.pack(side=tk.LEFT, padx=4, pady=8) self.response_time_weight_entry.pack(side=tk.LEFT, padx=4, pady=8)
self.response_time_weight_entry.insert(0, config.response_time_weight) self.response_time_weight_entry.insert(
0, config.getfloat("Settings", "response_time_weight")
)
self.response_time_weight_entry.bind( self.response_time_weight_entry.bind(
"<KeyRelease>", self.update_response_time_weight "<KeyRelease>", self.update_response_time_weight
) )
@ -457,7 +475,9 @@ class TkinterUI:
self.resolution_weight_label.pack(side=tk.LEFT, padx=4, pady=8) self.resolution_weight_label.pack(side=tk.LEFT, padx=4, pady=8)
self.resolution_weight_entry = tk.Entry(frame1_sort_params_column2) self.resolution_weight_entry = tk.Entry(frame1_sort_params_column2)
self.resolution_weight_entry.pack(side=tk.LEFT, padx=4, pady=8) self.resolution_weight_entry.pack(side=tk.LEFT, padx=4, pady=8)
self.resolution_weight_entry.insert(0, config.resolution_weight) self.resolution_weight_entry.insert(
0, config.getfloat("Settings", "resolution_weight")
)
self.resolution_weight_entry.bind("<KeyRelease>", self.update_resolution_weight) self.resolution_weight_entry.bind("<KeyRelease>", self.update_resolution_weight)
frame1_domain_blacklist = tk.Frame(frame1) frame1_domain_blacklist = tk.Frame(frame1)
@ -473,7 +493,9 @@ class TkinterUI:
self.domain_blacklist_text.pack( self.domain_blacklist_text.pack(
side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH
) )
self.domain_blacklist_text.insert(tk.END, ",".join(config.domain_blacklist)) self.domain_blacklist_text.insert(
tk.END, config.get("Settings", "domain_blacklist")
)
self.domain_blacklist_text.bind("<KeyRelease>", self.update_domain_blacklist) self.domain_blacklist_text.bind("<KeyRelease>", self.update_domain_blacklist)
frame1_url_keywords_blacklist = tk.Frame(frame1) frame1_url_keywords_blacklist = tk.Frame(frame1)
@ -490,7 +512,7 @@ class TkinterUI:
side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH
) )
self.url_keywords_blacklist_text.insert( self.url_keywords_blacklist_text.insert(
tk.END, ",".join(config.url_keywords_blacklist) tk.END, config.get("Settings", "url_keywords_blacklist")
) )
self.url_keywords_blacklist_text.bind( self.url_keywords_blacklist_text.bind(
"<KeyRelease>", self.update_url_keywords_blacklist "<KeyRelease>", self.update_url_keywords_blacklist
@ -503,7 +525,9 @@ class TkinterUI:
frame2_open_online_search, text="开启在线搜索:", width=13 frame2_open_online_search, text="开启在线搜索:", width=13
) )
self.open_online_search_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_online_search_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_online_search_var = tk.BooleanVar(value=config.open_online_search) self.open_online_search_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_online_search")
)
self.open_online_search_checkbutton = ttk.Checkbutton( self.open_online_search_checkbutton = ttk.Checkbutton(
frame2_open_online_search, frame2_open_online_search,
variable=self.open_online_search_var, variable=self.open_online_search_var,
@ -526,7 +550,7 @@ class TkinterUI:
self.favorite_list_text.pack( self.favorite_list_text.pack(
side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH
) )
self.favorite_list_text.insert(tk.END, ",".join(config.favorite_list)) self.favorite_list_text.insert(tk.END, config.get("Settings", "favorite_list"))
self.favorite_list_text.bind("<KeyRelease>", self.update_favorite_list) self.favorite_list_text.bind("<KeyRelease>", self.update_favorite_list)
frame2_favorite_page_num = tk.Frame(frame2) frame2_favorite_page_num = tk.Frame(frame2)
@ -538,7 +562,9 @@ class TkinterUI:
self.favorite_page_num_label.pack(side=tk.LEFT, padx=4, pady=8) self.favorite_page_num_label.pack(side=tk.LEFT, padx=4, pady=8)
self.favorite_page_num_entry = tk.Entry(frame2_favorite_page_num) self.favorite_page_num_entry = tk.Entry(frame2_favorite_page_num)
self.favorite_page_num_entry.pack(side=tk.LEFT, padx=4, pady=8) self.favorite_page_num_entry.pack(side=tk.LEFT, padx=4, pady=8)
self.favorite_page_num_entry.insert(0, config.favorite_page_num) self.favorite_page_num_entry.insert(
0, config.getint("Settings", "favorite_page_num")
)
self.favorite_page_num_entry.bind("<KeyRelease>", self.update_favorite_page_num) self.favorite_page_num_entry.bind("<KeyRelease>", self.update_favorite_page_num)
frame2_default_page_num = tk.Frame(frame2) frame2_default_page_num = tk.Frame(frame2)
@ -550,7 +576,9 @@ class TkinterUI:
self.default_page_num_label.pack(side=tk.LEFT, padx=4, pady=8) self.default_page_num_label.pack(side=tk.LEFT, padx=4, pady=8)
self.default_page_num_entry = tk.Entry(frame2_default_page_num) self.default_page_num_entry = tk.Entry(frame2_default_page_num)
self.default_page_num_entry.pack(side=tk.LEFT, padx=4, pady=8) self.default_page_num_entry.pack(side=tk.LEFT, padx=4, pady=8)
self.default_page_num_entry.insert(0, config.default_page_num) self.default_page_num_entry.insert(
0, config.getint("Settings", "default_page_num")
)
self.default_page_num_entry.bind("<KeyRelease>", self.update_default_page_num) self.default_page_num_entry.bind("<KeyRelease>", self.update_default_page_num)
frame2_recent_days = tk.Frame(frame2) frame2_recent_days = tk.Frame(frame2)
@ -562,7 +590,7 @@ class TkinterUI:
self.recent_days_label.pack(side=tk.LEFT, padx=4, pady=8) self.recent_days_label.pack(side=tk.LEFT, padx=4, pady=8)
self.recent_days_entry = tk.Entry(frame2_recent_days) self.recent_days_entry = tk.Entry(frame2_recent_days)
self.recent_days_entry.pack(side=tk.LEFT, padx=4, pady=8) self.recent_days_entry.pack(side=tk.LEFT, padx=4, pady=8)
self.recent_days_entry.insert(30, config.recent_days) self.recent_days_entry.insert(30, config.getint("Settings", "recent_days"))
self.recent_days_entry.bind("<KeyRelease>", self.update_recent_days) self.recent_days_entry.bind("<KeyRelease>", self.update_recent_days)
frame3_open_subscribe = tk.Frame(frame3) frame3_open_subscribe = tk.Frame(frame3)
@ -572,7 +600,9 @@ class TkinterUI:
frame3_open_subscribe, text="开启订阅源:", width=9 frame3_open_subscribe, text="开启订阅源:", width=9
) )
self.open_subscribe_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_subscribe_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_subscribe_var = tk.BooleanVar(value=config.open_subscribe) self.open_subscribe_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_subscribe")
)
self.open_subscribe_checkbutton = ttk.Checkbutton( self.open_subscribe_checkbutton = ttk.Checkbutton(
frame3_open_subscribe, frame3_open_subscribe,
variable=self.open_subscribe_var, variable=self.open_subscribe_var,
@ -595,7 +625,9 @@ class TkinterUI:
self.subscribe_urls_text.pack( self.subscribe_urls_text.pack(
side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH
) )
self.subscribe_urls_text.insert(tk.END, ",".join(config.subscribe_urls)) self.subscribe_urls_text.insert(
tk.END, config.get("Settings", "subscribe_urls")
)
self.subscribe_urls_text.bind("<KeyRelease>", self.update_subscribe_urls) self.subscribe_urls_text.bind("<KeyRelease>", self.update_subscribe_urls)
frame4_multicast = tk.Frame(frame4) frame4_multicast = tk.Frame(frame4)
@ -605,7 +637,9 @@ class TkinterUI:
frame4_multicast, text="开启组播源:", width=9 frame4_multicast, text="开启组播源:", width=9
) )
self.open_multicast_label.pack(side=tk.LEFT, padx=4, pady=8) self.open_multicast_label.pack(side=tk.LEFT, padx=4, pady=8)
self.open_multicast_var = tk.BooleanVar(value=config.open_multicast) self.open_multicast_var = tk.BooleanVar(
value=config.getboolean("Settings", "open_multicast")
)
self.open_multicast_checkbutton = ttk.Checkbutton( self.open_multicast_checkbutton = ttk.Checkbutton(
frame4_multicast, frame4_multicast,
variable=self.open_multicast_var, variable=self.open_multicast_var,
@ -624,7 +658,7 @@ class TkinterUI:
self.region_list_text.pack( self.region_list_text.pack(
side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH side=tk.LEFT, padx=4, pady=8, expand=True, fill=tk.BOTH
) )
self.region_list_text.insert(tk.END, ",".join(config.region_list)) self.region_list_text.insert(tk.END, config.get("Settings", "region_list"))
self.region_list_text.bind("<KeyRelease>", self.update_region_list) self.region_list_text.bind("<KeyRelease>", self.update_region_list)
root_operate = tk.Frame(self.root) root_operate = tk.Frame(self.root)

@ -20,7 +20,7 @@ def get_fofa_urls_from_region_list():
""" """
Get the FOFA url from region Get the FOFA url from region
""" """
region_list = getattr(config, "region_list", []) region_list = config.get("Settings", "region_list").split(",")
urls = [] urls = []
region_url = getattr(fofa_map, "region_url") region_url = getattr(fofa_map, "region_url")
if "all" in region_list: if "all" in region_list:
@ -43,19 +43,21 @@ async def get_channels_by_fofa(callback):
fofa_results = {} fofa_results = {}
callback(f"正在获取组播源更新, 共{fofa_urls_len}个地区", 0) callback(f"正在获取组播源更新, 共{fofa_urls_len}个地区", 0)
proxy = None proxy = None
if config.open_proxy: open_proxy = config.getboolean("Settings", "open_proxy")
open_driver = config.getboolean("Settings", "open_driver")
if open_proxy:
proxy = await get_proxy(fofa_urls[0], best=True, with_test=True) proxy = await get_proxy(fofa_urls[0], best=True, with_test=True)
def process_fofa_channels(fofa_url): def process_fofa_channels(fofa_url):
nonlocal proxy, fofa_urls_len nonlocal proxy, fofa_urls_len, open_driver
results = {} results = {}
try: try:
if config.open_driver: if open_driver:
driver = setup_driver(proxy) driver = setup_driver(proxy)
try: try:
retry_func(lambda: driver.get(fofa_url), name=fofa_url) retry_func(lambda: driver.get(fofa_url), name=fofa_url)
except Exception as e: except Exception as e:
if config.open_proxy: if open_proxy:
proxy = get_proxy_next() proxy = get_proxy_next()
driver.close() driver.close()
driver.quit() driver.quit()
@ -76,7 +78,7 @@ async def get_channels_by_fofa(callback):
except Exception as e: except Exception as e:
print(e) print(e)
finally: finally:
if config.open_driver: if open_driver:
driver.close() driver.close()
driver.quit() driver.quit()
pbar.update() pbar.update()
@ -85,18 +87,16 @@ async def get_channels_by_fofa(callback):
f"正在获取组播源更新, 剩余{remain}个地区待获取, 预计剩余时间: {get_pbar_remaining(pbar, start_time)}", f"正在获取组播源更新, 剩余{remain}个地区待获取, 预计剩余时间: {get_pbar_remaining(pbar, start_time)}",
int((pbar.n / fofa_urls_len) * 100), int((pbar.n / fofa_urls_len) * 100),
) )
if config.open_online_search and pbar.n / fofa_urls_len == 1:
callback("正在获取在线搜索结果, 请耐心等待", 0)
return results return results
max_workers = 3 if config.open_driver else 10 max_workers = 3 if open_driver else 10
with ThreadPoolExecutor(max_workers=max_workers) as executor: with ThreadPoolExecutor(max_workers=max_workers) as executor:
futures = [ futures = [
executor.submit(process_fofa_channels, fofa_url) for fofa_url in fofa_urls executor.submit(process_fofa_channels, fofa_url) for fofa_url in fofa_urls
] ]
for future in futures: for future in futures:
fofa_results = merge_objects(fofa_results, future.result()) fofa_results = merge_objects(fofa_results, future.result())
if not config.open_driver: if not open_driver:
close_session() close_session()
pbar.close() pbar.close()
return fofa_results return fofa_results

@ -88,7 +88,7 @@ def get_region_urls_from_IPTV_Multicast_source():
region_url[name]["联通"] = unicom region_url[name]["联通"] = unicom
region_url[name]["移动"] = mobile region_url[name]["移动"] = mobile
region_url[name]["电信"] = telecom region_url[name]["电信"] = telecom
with open("multicast/multicast_map.json", "w", encoding="utf-8") as f: with open("updates/multicast/multicast_map.json", "w", encoding="utf-8") as f:
json.dump(region_url, f, ensure_ascii=False, indent=4) json.dump(region_url, f, ensure_ascii=False, indent=4)
@ -96,9 +96,9 @@ def get_multicast_urls_info_from_region_list():
""" """
Get the multicast urls info from region Get the multicast urls info from region
""" """
region_list = getattr(config, "region_list", []) region_list = config.get("Settings", "region_list").split(",")
urls_info = [] urls_info = []
with open("multicast/multicast_map.json", "r", encoding="utf-8") as f: with open("updates/multicast/multicast_map.json", "r", encoding="utf-8") as f:
region_url = json.load(f) region_url = json.load(f)
if "all" in region_list: if "all" in region_list:
urls_info = [ urls_info = [
@ -125,7 +125,9 @@ async def get_multicast_region_result():
multicast_result = await get_channels_by_subscribe_urls( multicast_result = await get_channels_by_subscribe_urls(
urls=multicast_region_urls_info, multicast=True urls=multicast_region_urls_info, multicast=True
) )
with open("multicast/multicast_region_result.json", "w", encoding="utf-8") as f: with open(
"updates/multicast/multicast_region_result.json", "w", encoding="utf-8"
) as f:
json.dump(multicast_result, f, ensure_ascii=False, indent=4) json.dump(multicast_result, f, ensure_ascii=False, indent=4)
@ -139,10 +141,15 @@ async def get_channels_by_multicast(names, callback):
# if not pageUrl: # if not pageUrl:
# return channels # return channels
proxy = None proxy = None
if config.open_proxy: open_proxy = config.getboolean("Settings", "open_proxy")
open_driver = config.getboolean("Settings", "open_driver")
default_page_num = config.getint("Settings", "default_page_num")
if open_proxy:
proxy = await get_proxy(pageUrl, best=True, with_test=True) proxy = await get_proxy(pageUrl, best=True, with_test=True)
start_time = time() start_time = time()
with open("multicast/multicast_region_result.json", "r", encoding="utf-8") as f: with open(
"updates/multicast/multicast_region_result.json", "r", encoding="utf-8"
) as f:
multicast_region_result = json.load(f) multicast_region_result = json.load(f)
name_region_type_result = get_channel_multicast_name_region_type_result( name_region_type_result = get_channel_multicast_name_region_type_result(
multicast_region_result, names multicast_region_result, names
@ -150,18 +157,18 @@ async def get_channels_by_multicast(names, callback):
region_type_list = get_channel_multicast_region_type_list(name_region_type_result) region_type_list = get_channel_multicast_region_type_list(name_region_type_result)
def process_channel_by_multicast(region, type): def process_channel_by_multicast(region, type):
nonlocal proxy, open_driver, default_page_num
name = f"{region}{type}" name = f"{region}{type}"
info_list = [] info_list = []
nonlocal proxy
try: try:
if config.open_driver: if open_driver:
driver = setup_driver(proxy) driver = setup_driver(proxy)
try: try:
retry_func( retry_func(
lambda: driver.get(pageUrl), name=f"multicast search:{name}" lambda: driver.get(pageUrl), name=f"multicast search:{name}"
) )
except Exception as e: except Exception as e:
if config.open_proxy: if open_proxy:
proxy = get_proxy_next() proxy = get_proxy_next()
driver.close() driver.close()
driver.quit() driver.quit()
@ -178,7 +185,7 @@ async def get_channels_by_multicast(names, callback):
name=f"multicast search:{name}", name=f"multicast search:{name}",
) )
except Exception as e: except Exception as e:
if config.open_proxy: if open_proxy:
proxy = get_proxy_next() proxy = get_proxy_next()
page_soup = get_soup_requests(pageUrl, data=post_form, proxy=proxy) page_soup = get_soup_requests(pageUrl, data=post_form, proxy=proxy)
if not page_soup: if not page_soup:
@ -192,19 +199,16 @@ async def get_channels_by_multicast(names, callback):
code = parse_qs(parsed_url.query).get("code", [None])[0] code = parse_qs(parsed_url.query).get("code", [None])[0]
if code: if code:
break break
isFavorite = name in config.favorite_list pageNum = default_page_num
pageNum = (
config.favorite_page_num if isFavorite else config.default_page_num
)
# retry_limit = 3 # retry_limit = 3
for page in range(1, pageNum + 1): for page in range(1, pageNum + 1):
# retries = 0 # retries = 0
# if not config.open_driver and page == 1: # if not open_driver and page == 1:
# retries = 2 # retries = 2
# while retries < retry_limit: # while retries < retry_limit:
try: try:
if page > 1: if page > 1:
if config.open_driver: if open_driver:
page_link = find_clickable_element_with_retry( page_link = find_clickable_element_with_retry(
driver, driver,
( (
@ -226,26 +230,22 @@ async def get_channels_by_multicast(names, callback):
name=f"multicast search:{name}, page:{page}", name=f"multicast search:{name}, page:{page}",
) )
sleep(1) sleep(1)
soup = ( soup = get_soup(driver.page_source) if open_driver else page_soup
get_soup(driver.page_source)
if config.open_driver
else page_soup
)
if soup: if soup:
results = ( results = (
get_results_from_multicast_soup(soup) get_results_from_multicast_soup(soup)
if config.open_driver if open_driver
else get_results_from_multicast_soup_requests(soup) else get_results_from_multicast_soup_requests(soup)
) )
print(name, "page:", page, "results num:", len(results)) print(name, "page:", page, "results num:", len(results))
if len(results) == 0: if len(results) == 0:
print(f"{name}:No results found") print(f"{name}:No results found")
# if config.open_driver: # if open_driver:
# driver.refresh() # driver.refresh()
# retries += 1 # retries += 1
# continue # continue
# elif len(results) <= 3: # elif len(results) <= 3:
# if config.open_driver: # if open_driver:
# next_page_link = find_clickable_element_with_retry( # next_page_link = find_clickable_element_with_retry(
# driver, # driver,
# ( # (
@ -255,7 +255,7 @@ async def get_channels_by_multicast(names, callback):
# retries=1, # retries=1,
# ) # )
# if next_page_link: # if next_page_link:
# if config.open_proxy: # if open_proxy:
# proxy = get_proxy_next() # proxy = get_proxy_next()
# driver.close() # driver.close()
# driver.quit() # driver.quit()
@ -267,7 +267,7 @@ async def get_channels_by_multicast(names, callback):
# break # break
else: else:
print(f"{name}:No results found") print(f"{name}:No results found")
# if config.open_driver: # if open_driver:
# driver.refresh() # driver.refresh()
# retries += 1 # retries += 1
# continue # continue
@ -281,7 +281,7 @@ async def get_channels_by_multicast(names, callback):
print(f"{name}:Error on search: {e}") print(f"{name}:Error on search: {e}")
pass pass
finally: finally:
if config.open_driver: if open_driver:
driver.close() driver.close()
driver.quit() driver.quit()
pbar.update() pbar.update()
@ -318,7 +318,7 @@ async def get_channels_by_multicast(names, callback):
channels = get_channel_multicast_result( channels = get_channel_multicast_result(
name_region_type_result, search_region_type_result name_region_type_result, search_region_type_result
) )
if not config.open_driver: if not open_driver:
close_session() close_session()
pbar.close() pbar.close()
return channels return channels

@ -70,22 +70,31 @@ async def get_channels_by_online_search(names, callback):
if not pageUrl: if not pageUrl:
return channels return channels
proxy = None proxy = None
if config.open_proxy: open_proxy = config.getboolean("Settings", "open_proxy")
open_driver = config.getboolean("Settings", "open_driver")
favorite_list = [
favorite
for favorite in config.get("Settings", "favorite_list").split(",")
if favorite.strip()
]
favorite_page_num = config.getint("Settings", "favorite_page_num")
default_page_num = config.getint("Settings", "default_page_num")
if open_proxy:
proxy = await get_proxy(pageUrl, best=True, with_test=True) proxy = await get_proxy(pageUrl, best=True, with_test=True)
start_time = time() start_time = time()
def process_channel_by_online_search(name): def process_channel_by_online_search(name):
nonlocal proxy, open_proxy, open_driver, favorite_list, favorite_page_num, default_page_num
info_list = [] info_list = []
nonlocal proxy
try: try:
if config.open_driver: if open_driver:
driver = setup_driver(proxy) driver = setup_driver(proxy)
try: try:
retry_func( retry_func(
lambda: driver.get(pageUrl), name=f"online search:{name}" lambda: driver.get(pageUrl), name=f"online search:{name}"
) )
except Exception as e: except Exception as e:
if config.open_proxy: if open_proxy:
proxy = get_proxy_next() proxy = get_proxy_next()
driver.close() driver.close()
driver.quit() driver.quit()
@ -101,25 +110,22 @@ async def get_channels_by_online_search(names, callback):
name=f"online search:{name}", name=f"online search:{name}",
) )
except Exception as e: except Exception as e:
if config.open_proxy: if open_proxy:
proxy = get_proxy_next() proxy = get_proxy_next()
page_soup = get_soup_requests(request_url, proxy=proxy) page_soup = get_soup_requests(request_url, proxy=proxy)
if not page_soup: if not page_soup:
print(f"{name}:Request fail.") print(f"{name}:Request fail.")
return return
isFavorite = name in config.favorite_list pageNum = favorite_page_num if name in favorite_list else default_page_num
pageNum = (
config.favorite_page_num if isFavorite else config.default_page_num
)
retry_limit = 3 retry_limit = 3
for page in range(1, pageNum + 1): for page in range(1, pageNum + 1):
retries = 0 retries = 0
if not config.open_driver and page == 1: if not open_driver and page == 1:
retries = 2 retries = 2
while retries < retry_limit: while retries < retry_limit:
try: try:
if page > 1: if page > 1:
if config.open_driver: if open_driver:
page_link = find_clickable_element_with_retry( page_link = find_clickable_element_with_retry(
driver, driver,
( (
@ -141,14 +147,12 @@ async def get_channels_by_online_search(names, callback):
) )
sleep(1) sleep(1)
soup = ( soup = (
get_soup(driver.page_source) get_soup(driver.page_source) if open_driver else page_soup
if config.open_driver
else page_soup
) )
if soup: if soup:
results = ( results = (
get_results_from_soup(soup, name) get_results_from_soup(soup, name)
if config.open_driver if open_driver
else get_results_from_soup_requests(soup, name) else get_results_from_soup_requests(soup, name)
) )
print(name, "page:", page, "results num:", len(results)) print(name, "page:", page, "results num:", len(results))
@ -156,12 +160,12 @@ async def get_channels_by_online_search(names, callback):
print( print(
f"{name}:No results found, refreshing page and retrying..." f"{name}:No results found, refreshing page and retrying..."
) )
if config.open_driver: if open_driver:
driver.refresh() driver.refresh()
retries += 1 retries += 1
continue continue
elif len(results) <= 3: elif len(results) <= 3:
if config.open_driver: if open_driver:
next_page_link = find_clickable_element_with_retry( next_page_link = find_clickable_element_with_retry(
driver, driver,
( (
@ -171,7 +175,7 @@ async def get_channels_by_online_search(names, callback):
retries=1, retries=1,
) )
if next_page_link: if next_page_link:
if config.open_proxy: if open_proxy:
proxy = get_proxy_next() proxy = get_proxy_next()
driver.close() driver.close()
driver.quit() driver.quit()
@ -188,7 +192,7 @@ async def get_channels_by_online_search(names, callback):
print( print(
f"{name}:No results found, refreshing page and retrying..." f"{name}:No results found, refreshing page and retrying..."
) )
if config.open_driver: if open_driver:
driver.refresh() driver.refresh()
retries += 1 retries += 1
continue continue
@ -201,7 +205,7 @@ async def get_channels_by_online_search(names, callback):
print(f"{name}:Error on search: {e}") print(f"{name}:Error on search: {e}")
pass pass
finally: finally:
if config.open_driver: if open_driver:
driver.close() driver.close()
driver.quit() driver.quit()
pbar.update() pbar.update()
@ -224,7 +228,7 @@ async def get_channels_by_online_search(names, callback):
data = result.get("data", []) data = result.get("data", [])
if name: if name:
channels[name] = data channels[name] = data
if not config.open_driver: if not open_driver:
close_session() close_session()
pbar.close() pbar.close()
return channels return channels

@ -22,6 +22,7 @@ def get_proxy_list(page_count=1):
] ]
proxy_list = [] proxy_list = []
urls = [] urls = []
open_driver = config.getboolean("Settings", "open_driver")
for page_index in range(1, page_count + 1): for page_index in range(1, page_count + 1):
for pattern in url_pattern: for pattern in url_pattern:
url = pattern.format(page_index) url = pattern.format(page_index)
@ -29,9 +30,10 @@ def get_proxy_list(page_count=1):
pbar = tqdm(total=len(urls), desc="Getting proxy list") pbar = tqdm(total=len(urls), desc="Getting proxy list")
def get_proxy(url): def get_proxy(url):
nonlocal open_driver
proxys = [] proxys = []
try: try:
if config.open_driver: if open_driver:
soup = retry_func(lambda: get_soup_driver(url), name=url) soup = retry_func(lambda: get_soup_driver(url), name=url)
else: else:
try: try:
@ -50,12 +52,12 @@ def get_proxy_list(page_count=1):
pbar.update() pbar.update()
return proxys return proxys
max_workers = 3 if config.open_driver else 10 max_workers = 3 if open_driver else 10
with ThreadPoolExecutor(max_workers=max_workers) as executor: with ThreadPoolExecutor(max_workers=max_workers) as executor:
futures = [executor.submit(get_proxy, url) for url in urls] futures = [executor.submit(get_proxy, url) for url in urls]
for future in futures: for future in futures:
proxy_list.extend(future.result()) proxy_list.extend(future.result())
if not config.open_driver: if not open_driver:
close_session() close_session()
pbar.close() pbar.close()
return proxy_list return proxy_list

@ -19,7 +19,12 @@ async def get_channels_by_subscribe_urls(urls=None, multicast=False, callback=No
""" """
subscribe_results = {} subscribe_results = {}
pattern = r"^(.*?),(?!#genre#)(.*?)$" pattern = r"^(.*?),(?!#genre#)(.*?)$"
subscribe_urls_len = len(urls if urls else config.subscribe_urls) subscribe_urls = [
url
for url in config.get("Settings", "subscribe_urls").split(",")
if url.strip()
]
subscribe_urls_len = len(urls if urls else subscribe_urls)
pbar = tqdm_asyncio(total=subscribe_urls_len, desc="Processing subscribe") pbar = tqdm_asyncio(total=subscribe_urls_len, desc="Processing subscribe")
start_time = time() start_time = time()
if callback: if callback:
@ -84,7 +89,7 @@ async def get_channels_by_subscribe_urls(urls=None, multicast=False, callback=No
with ThreadPoolExecutor(max_workers=100) as executor: with ThreadPoolExecutor(max_workers=100) as executor:
futures = [ futures = [
executor.submit(process_subscribe_channels, subscribe_url) executor.submit(process_subscribe_channels, subscribe_url)
for subscribe_url in (urls if urls else config.subscribe_urls) for subscribe_url in (urls if urls else subscribe_urls)
] ]
for future in futures: for future in futures:
subscribe_results = merge_objects(subscribe_results, future.result()) subscribe_results = merge_objects(subscribe_results, future.result())

@ -11,7 +11,14 @@ from opencc import OpenCC
config = get_config() config = get_config()
handler = RotatingFileHandler("result_new.log", encoding="utf-8") log_dir = "output"
log_file = "result_new.log"
log_path = os.path.join(log_dir, log_file)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
handler = RotatingFileHandler(log_path, encoding="utf-8")
logging.basicConfig( logging.basicConfig(
handlers=[handler], handlers=[handler],
format="%(message)s", format="%(message)s",
@ -46,28 +53,17 @@ def get_channel_items():
""" """
Get the channel items from the source file Get the channel items from the source file
""" """
# Open the source file and read all lines. user_source_file = config.get("Settings", "source_file")
user_source_file = ( user_final_file = config.get("Settings", "final_file")
"user_" + config.source_file
if os.path.exists("user_" + config.source_file)
else getattr(config, "source_file", "demo.txt")
)
# Open the old final file and read all lines.
user_final_file = (
"user_" + config.final_file
if os.path.exists("user_" + config.final_file)
else getattr(config, "final_file", "result.txt")
)
# Create a dictionary to store the channels.
channels = defaultdict(lambda: defaultdict(list)) channels = defaultdict(lambda: defaultdict(list))
if os.path.exists(resource_path(user_source_file)): if os.path.exists(resource_path(user_source_file)):
with open(resource_path(user_source_file), "r", encoding="utf-8") as file: with open(resource_path(user_source_file), "r", encoding="utf-8") as file:
channels = get_channel_data_from_file(channels, file) channels = get_channel_data_from_file(channels, file)
if config.open_use_old_result and os.path.exists(resource_path(user_final_file)): if config.getboolean("Settings", "open_use_old_result") and os.path.exists(
resource_path(user_final_file)
):
with open(resource_path(user_final_file), "r", encoding="utf-8") as file: with open(resource_path(user_final_file), "r", encoding="utf-8") as file:
channels = get_channel_data_from_file(channels, file) channels = get_channel_data_from_file(channels, file)
@ -78,7 +74,7 @@ def format_channel_name(name):
""" """
Format the channel name with sub and replace and lower Format the channel name with sub and replace and lower
""" """
if config.open_keep_all: if config.getboolean("Settings", "open_keep_all"):
return name return name
sub_pattern = ( sub_pattern = (
r"-|_|\((.*?)\)|\[(.*?)\]| |频道|标清|高清|HD|hd|超清|超高|超高清|中央|央视|台" r"-|_|\((.*?)\)|\[(.*?)\]| |频道|标清|高清|HD|hd|超清|超高|超高清|中央|央视|台"
@ -122,7 +118,7 @@ def channel_name_is_equal(name1, name2):
""" """
Check if the channel name is equal Check if the channel name is equal
""" """
if config.open_keep_all: if config.getboolean("Settings", "open_keep_all"):
return True return True
cc = OpenCC("t2s") cc = OpenCC("t2s")
name1_converted = cc.convert(format_channel_name(name1)) name1_converted = cc.convert(format_channel_name(name1))
@ -214,7 +210,7 @@ def get_channel_multicast_region_type_list(result):
""" """
Get the channel multicast region type list from result Get the channel multicast region type list from result
""" """
config_region_list = set(getattr(config, "region_list", [])) config_region_list = set(config.get("Settings", "region_list").split(","))
region_type_list = { region_type_list = {
(region, type) (region, type)
for region_type in result.values() for region_type in result.values()
@ -376,7 +372,7 @@ def update_channel_urls_txt(cate, name, urls):
Update the category and channel urls to the final file Update the category and channel urls to the final file
""" """
genre_line = cate + ",#genre#\n" genre_line = cate + ",#genre#\n"
filename = "result_new.txt" filename = "output/result_new.txt"
if not os.path.exists(filename): if not os.path.exists(filename):
open(filename, "w").close() open(filename, "w").close()
@ -465,7 +461,7 @@ def append_total_data(*args, **kwargs):
""" """
Append total channel data Append total channel data
""" """
if config.open_keep_all: if config.getboolean("Settings", "open_keep_all"):
return append_all_method_data_keep_all(*args, **kwargs) return append_all_method_data_keep_all(*args, **kwargs)
else: else:
return append_all_method_data(*args, **kwargs) return append_all_method_data(*args, **kwargs)
@ -484,7 +480,7 @@ def append_all_method_data(
("multicast", multicast_result), ("multicast", multicast_result),
("online_search", online_search_result), ("online_search", online_search_result),
]: ]:
if getattr(config, f"open_{method}"): if config.getboolean("Settings", f"open_{method}"):
data = append_data_to_info_data( data = append_data_to_info_data(
data, data,
cate, cate,
@ -497,7 +493,9 @@ def append_all_method_data(
len(get_channel_results_by_name(name, result)), len(get_channel_results_by_name(name, result)),
) )
total_channel_data_len = len(data.get(cate, {}).get(name, [])) total_channel_data_len = len(data.get(cate, {}).get(name, []))
if total_channel_data_len == 0 or config.open_use_old_result: if total_channel_data_len == 0 or config.getboolean(
"Settings", "open_use_old_result"
):
data = append_data_to_info_data( data = append_data_to_info_data(
data, data,
cate, cate,
@ -524,11 +522,11 @@ def append_all_method_data_keep_all(
("multicast", multicast_result), ("multicast", multicast_result),
("online_search", online_search_result), ("online_search", online_search_result),
]: ]:
if result and getattr(config, f"open_{result_name}"): if result and config.getboolean("Settings", f"open_{result_name}"):
for name, urls in result.items(): for name, urls in result.items():
data = append_data_to_info_data(data, cate, name, urls) data = append_data_to_info_data(data, cate, name, urls)
print(name, f"{result_name.capitalize()} num:", len(urls)) print(name, f"{result_name.capitalize()} num:", len(urls))
if config.open_use_old_result: if config.getboolean("Settings", "open_use_old_result"):
old_urls = channel_obj.get(name, []) old_urls = channel_obj.get(name, [])
data = append_data_to_info_data( data = append_data_to_info_data(
data, data,

@ -1,6 +1,8 @@
from os import path from os import path
import sys import sys
from importlib import util
# from importlib import util
import configparser
def resource_path(relative_path, persistent=False): def resource_path(relative_path, persistent=False):
@ -19,29 +21,33 @@ def resource_path(relative_path, persistent=False):
return total_path return total_path
def load_external_config(config_path): # def load_external_config(config_path):
""" # """
Load the external config file # Load the external config file
""" # """
config = None # config = None
if path.exists(config_path): # if path.exists(config_path):
spec = util.spec_from_file_location("config", config_path) # spec = util.spec_from_file_location("config", config_path)
config = util.module_from_spec(spec) # config = util.module_from_spec(spec)
spec.loader.exec_module(config) # spec.loader.exec_module(config)
else: # else:
import config # import config.config as config
return config # return config
def get_config(): def get_config():
""" """
Get the config Get the config
""" """
user_config_path = resource_path("user_config.py") config_parser = configparser.ConfigParser()
default_config_path = resource_path("config.py") user_config_path = resource_path("config/user_config.ini")
config = ( default_config_path = resource_path("config/config.ini")
load_external_config(user_config_path)
if path.exists(user_config_path) config_files = [user_config_path, default_config_path]
else load_external_config(default_config_path) for config_file in config_files:
) if path.exists(config_file):
return config with open(config_file, "r", encoding="utf-8") as f:
config_parser.read_file(f)
break
return config_parser

@ -160,10 +160,13 @@ async def sort_urls_by_speed_and_resolution(infoList, ffmpeg=False):
default_response_time_weight = 0.5 default_response_time_weight = 0.5
default_resolution_weight = 0.5 default_resolution_weight = 0.5
response_time_weight = getattr( response_time_weight = (
config, "response_time_weight", default_response_time_weight config.getfloat("Settings", "response_time_weight")
or default_response_time_weight
)
resolution_weight = (
config.getfloat("Settings", "resolution_weight") or default_resolution_weight
) )
resolution_weight = getattr(config, "resolution_weight", default_resolution_weight)
# Check if weights are valid # Check if weights are valid
if not ( if not (
0 <= response_time_weight <= 1 0 <= response_time_weight <= 1

@ -45,7 +45,7 @@ def filter_by_date(data):
Filter by date and limit Filter by date and limit
""" """
default_recent_days = 30 default_recent_days = 30
use_recent_days = getattr(config, "recent_days", 30) use_recent_days = config.getint("Settings", "recent_days")
if not isinstance(use_recent_days, int) or use_recent_days <= 0: if not isinstance(use_recent_days, int) or use_recent_days <= 0:
use_recent_days = default_recent_days use_recent_days = default_recent_days
start_date = datetime.datetime.now() - datetime.timedelta(days=use_recent_days) start_date = datetime.datetime.now() - datetime.timedelta(days=use_recent_days)
@ -64,8 +64,10 @@ def filter_by_date(data):
recent_data_len = len(recent_data) recent_data_len = len(recent_data)
if recent_data_len == 0: if recent_data_len == 0:
recent_data = unrecent_data recent_data = unrecent_data
elif recent_data_len < config.urls_limit: elif recent_data_len < config.getint("Settings", "urls_limit"):
recent_data.extend(unrecent_data[: config.urls_limit - len(recent_data)]) recent_data.extend(
unrecent_data[: config.getint("Settings", "urls_limit") - len(recent_data)]
)
return recent_data return recent_data
@ -88,7 +90,7 @@ def get_total_urls_from_info_list(infoList):
Get the total urls from info list Get the total urls from info list
""" """
total_urls = [url for url, _, _ in infoList] total_urls = [url for url, _, _ in infoList]
return list(dict.fromkeys(total_urls))[: int(config.urls_limit)] return list(dict.fromkeys(total_urls))[: config.getint("Settings", "urls_limit")]
def get_total_urls_from_sorted_data(data): def get_total_urls_from_sorted_data(data):
@ -96,11 +98,11 @@ def get_total_urls_from_sorted_data(data):
Get the total urls with filter by date and depulicate from sorted data Get the total urls with filter by date and depulicate from sorted data
""" """
total_urls = [] total_urls = []
if len(data) > config.urls_limit: if len(data) > config.getint("Settings", "urls_limit"):
total_urls = [url for (url, _, _), _ in filter_by_date(data)] total_urls = [url for (url, _, _), _ in filter_by_date(data)]
else: else:
total_urls = [url for (url, _, _), _ in data] total_urls = [url for (url, _, _), _ in data]
return list(dict.fromkeys(total_urls))[: config.urls_limit] return list(dict.fromkeys(total_urls))[: config.getint("Settings", "urls_limit")]
def is_ipv6(url): def is_ipv6(url):
@ -119,7 +121,7 @@ def check_url_ipv_type(url):
""" """
Check if the url is compatible with the ipv type in the config Check if the url is compatible with the ipv type in the config
""" """
ipv_type = getattr(config, "ipv_type", "ipv4") ipv_type = config.get("Settings", "ipv_type")
if ipv_type == "ipv4": if ipv_type == "ipv4":
return not is_ipv6(url) return not is_ipv6(url)
elif ipv_type == "ipv6": elif ipv_type == "ipv6":
@ -134,7 +136,8 @@ def check_by_domain_blacklist(url):
""" """
domain_blacklist = [ domain_blacklist = [
urlparse(domain).netloc if urlparse(domain).scheme else domain urlparse(domain).netloc if urlparse(domain).scheme else domain
for domain in getattr(config, "domain_blacklist", []) for domain in config.get("Settings", "domain_blacklist").split(",")
if domain.strip()
] ]
return urlparse(url).netloc not in domain_blacklist return urlparse(url).netloc not in domain_blacklist
@ -143,7 +146,11 @@ def check_by_url_keywords_blacklist(url):
""" """
Check by URL blacklist keywords Check by URL blacklist keywords
""" """
url_keywords_blacklist = getattr(config, "url_keywords_blacklist", []) url_keywords_blacklist = [
keyword
for keyword in config.get("Settings", "url_keywords_blacklist").split(",")
if keyword.strip()
]
return not any(keyword in url for keyword in url_keywords_blacklist) return not any(keyword in url for keyword in url_keywords_blacklist)