feat: retry

This commit is contained in:
guorong.zheng 2024-06-19 20:31:06 +08:00
parent 091a4541fa
commit 1af9ff2865
4 changed files with 162 additions and 56 deletions

@ -15,10 +15,11 @@ feedparser = "*"
pytz = "*"
selenium = "*"
selenium-stealth = "*"
aiohttp = "*"
bs4 = "*"
tqdm = "*"
async-timeout = "*"
pyinstaller = "*"
aiohttp-retry = "*"
[requires]
python_version = "3.8"

99
Pipfile.lock generated

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "11c3de86a4608bc8cb9df8ad66ccc8badc83779b3a5daf7b221a7a3a53ae1660"
"sha256": "7ba6998730a27a7cc92698bd503c672eb5a0bcce3a7412a7e044e50d915240ef"
},
"pipfile-spec": 6,
"requires": {
@ -99,6 +99,15 @@
"markers": "python_version >= '3.8'",
"version": "==3.9.5"
},
"aiohttp-retry": {
"hashes": [
"sha256:3aeeead8f6afe48272db93ced9440cf4eda8b6fd7ee2abb25357b7eb28525b45",
"sha256:9a8e637e31682ad36e1ff9f8bcba912fcfc7d7041722bc901a4b948da4d71ea9"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==2.8.3"
},
"aiosignal": {
"hashes": [
"sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc",
@ -107,6 +116,13 @@
"markers": "python_version >= '3.7'",
"version": "==1.3.1"
},
"altgraph": {
"hashes": [
"sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406",
"sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"
],
"version": "==0.17.4"
},
"async-timeout": {
"hashes": [
"sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f",
@ -426,6 +442,14 @@
"markers": "python_version >= '3.5'",
"version": "==3.7"
},
"importlib-metadata": {
"hashes": [
"sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570",
"sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"
],
"markers": "python_version < '3.10'",
"version": "==7.1.0"
},
"multidict": {
"hashes": [
"sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556",
@ -530,6 +554,22 @@
"markers": "python_version >= '3.7'",
"version": "==1.3.0.post0"
},
"packaging": {
"hashes": [
"sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002",
"sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"
],
"markers": "python_version >= '3.8'",
"version": "==24.1"
},
"pefile": {
"hashes": [
"sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc",
"sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"
],
"markers": "sys_platform == 'win32'",
"version": "==2023.2.7"
},
"pycparser": {
"hashes": [
"sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6",
@ -538,6 +578,33 @@
"markers": "python_version >= '3.8'",
"version": "==2.22"
},
"pyinstaller": {
"hashes": [
"sha256:000c36b13fe4cd8d0d8c2bc855b1ddcf39867b5adf389e6b5ca45b25fa3e619d",
"sha256:1c3060a263758cf7f0144ab4c016097b20451b2469d468763414665db1bb743d",
"sha256:2b71509468c811968c0b5decb5bbe85b6292ea52d7b1f26313d2aabb673fa9a5",
"sha256:355832a3acc7de90a255ecacd4b9f9e166a547a79c8905d49f14e3a75c1acdb9",
"sha256:39ac424d2ee2457d2ab11a5091436e75a0cccae207d460d180aa1fcbbafdd528",
"sha256:3f4b6520f4423fe19bcc2fd63ab7238851ae2bdcbc98f25bc5d2f97cc62012e9",
"sha256:5ff6bc2784c1026f8e2f04aa3760cbed41408e108a9d4cf1dd52ee8351a3f6e1",
"sha256:6303c7a009f47e6a96ef65aed49f41e36ece8d079b9193ca92fe807403e5fe80",
"sha256:81cccfa9b16699b457f4788c5cc119b50f3cd4d0db924955f15c33f2ad27a50d",
"sha256:d257f6645c7334cbd66f38a4fac62c3ad614cc46302b2b5d9f8cc48c563bce0e",
"sha256:fe0af018d7d5077180e3144ada89a4da5df8d07716eb7e9482834a56dc57a4e8",
"sha256:ff31c5b99e05a4384bbe2071df67ec8b2b347640a375eae9b40218be2f1754c6"
],
"index": "pypi",
"markers": "python_version < '3.13' and python_version >= '3.8'",
"version": "==6.8.0"
},
"pyinstaller-hooks-contrib": {
"hashes": [
"sha256:8bf0775771fbaf96bcd2f4dfd6f7ae6c1dd1b1efe254c7e50477b3c08e7841d8",
"sha256:fd5f37dcf99bece184e40642af88be16a9b89613ecb958a8bd1136634fc9fac5"
],
"markers": "python_version >= '3.7'",
"version": "==2024.7"
},
"pysocks": {
"hashes": [
"sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299",
@ -554,6 +621,14 @@
"index": "pypi",
"version": "==2024.1"
},
"pywin32-ctypes": {
"hashes": [
"sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60",
"sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"
],
"markers": "sys_platform == 'win32'",
"version": "==0.2.2"
},
"requests": {
"hashes": [
"sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760",
@ -580,6 +655,14 @@
"markers": "python_version >= '3' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==1.0.6"
},
"setuptools": {
"hashes": [
"sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4",
"sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"
],
"markers": "python_version >= '3.8'",
"version": "==70.0.0"
},
"sgmllib3k": {
"hashes": [
"sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"
@ -647,11 +730,11 @@
"socks"
],
"hashes": [
"sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d",
"sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"
"sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472",
"sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"
],
"markers": "python_version >= '3.8'",
"version": "==2.2.1"
"version": "==2.2.2"
},
"wsproto": {
"hashes": [
@ -756,6 +839,14 @@
],
"markers": "python_version >= '3.7'",
"version": "==1.9.4"
},
"zipp": {
"hashes": [
"sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19",
"sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"
],
"markers": "python_version >= '3.8'",
"version": "==3.19.2"
}
},
"develop": {}

76
main.py

@ -43,42 +43,46 @@ class UpdateSource:
self.total = 0
self.start_time = None
def append_data_to_info_data(self, cate, name, data):
def check_info_data(self, cate, name):
    """Ensure self.channel_data[cate][name] exists.

    Creates an empty dict for the category and an empty list for the
    channel name when either is missing or explicitly None, so callers
    can append to self.channel_data[cate][name] unconditionally.
    """
    store = self.channel_data
    if store.get(cate) is None:
        store[cate] = {}
    if store[cate].get(name) is None:
        store[cate][name] = []
def append_data_to_info_data(self, cate, name, data, check=True):
    """Append (url, date, resolution) tuples to self.channel_data[cate][name].

    :param cate: category key
    :param name: channel name key
    :param data: iterable of (url, date, resolution) tuples
    :param check: when True (default) only urls passing
        check_url_by_patterns() are kept; pass False to trust
        pre-filtered data (e.g. already speed-sorted results).
    """
    self.check_info_data(cate, name)
    for url, date, resolution in data:
        # Keep the url when it is non-empty and either filtering is
        # disabled or the url matches the allowed patterns.
        if url and (not check or check_url_by_patterns(url)):
            self.channel_data[cate][name].append((url, date, resolution))
async def sort_channel_list(self, cate, name, info_list):
    """Speed-test and sort one channel's url list, then replace the
    stored urls for (cate, name) with the sorted result.

    Progress (tqdm bar + UI callback) is always advanced in ``finally``
    so a failing channel still counts toward completion.

    NOTE(review): reconstructed from a diff render that interleaved the
    pre- and post-change bodies; the ``if config.open_sort:`` guard was
    removed in the post-change version — confirm the caller only
    schedules this task when sorting is enabled.
    """
    try:
        sorted_data = await sort_urls_by_speed_and_resolution(info_list)
        if sorted_data:
            # Sorted results fully replace the previously gathered
            # (unsorted) urls for this channel.
            self.check_info_data(cate, name)
            self.channel_data[cate][name] = []
            for (
                url,
                date,
                resolution,
            ), response_time in sorted_data:
                logging.info(
                    f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time}ms"
                )
            data = [
                (url, date, resolution)
                for (url, date, resolution), _ in sorted_data
            ]
            # check=False: these urls were already filtered before sorting.
            self.append_data_to_info_data(cate, name, data, False)
    except Exception as e:
        logging.error(f"Error: {e}")
    finally:
        self.pbar.update()
        self.pbar.set_description(
            f"Sorting, {self.pbar.total - self.pbar.n} channels remaining"
        )
        self.update_progress(
            f"正在测速排序, 剩余{self.pbar.total - self.pbar.n}频道, 预计剩余时间: {get_pbar_remaining(self.pbar, self.start_time)}",
            int((self.pbar.n / self.total) * 100),
        )
@ -154,7 +158,7 @@ class UpdateSource:
self.tasks = []
channel_names = [
name
for cate, channel_obj in self.channel_items.items()
for channel_obj in self.channel_items.values()
for name in channel_obj.keys()
]
self.total = len(channel_names)
@ -167,9 +171,11 @@ class UpdateSource:
for name, info_list in channel_obj.items()
]
self.pbar = tqdm_asyncio(total=len(self.tasks))
self.pbar.set_description(f"Sorting, {len(self.tasks)} urls remaining")
self.pbar.set_description(
f"Sorting, {len(self.tasks)} channels remaining"
)
self.update_progress(
f"正在测速排序, 共{len(self.tasks)}个接口",
f"正在测速排序, 共{len(self.tasks)}频道",
int((self.pbar.n / len(self.tasks)) * 100),
)
self.start_time = time()
@ -181,13 +187,19 @@ class UpdateSource:
handler.close()
logging.root.removeHandler(handler)
user_final_file = getattr(config, "final_file", "result.txt")
user_log_file = (
"user_result.log" if os.path.exists("user_config.py") else "result.log"
)
update_file(user_final_file, "result_new.txt")
update_file(user_log_file, "result_new.log")
if config.open_sort:
user_log_file = (
"user_result.log"
if os.path.exists("user_config.py")
else "result.log"
)
update_file(user_log_file, "result_new.log")
print(f"Update completed! Please check the {user_final_file} file!")
self.update_progress(f"更新完成, 请检查{user_final_file}文件", 100, True)
if self.run_ui:
self.update_progress(
f"更新完成, 请检查{user_final_file}文件", 100, True
)
except asyncio.exceptions.CancelledError:
print("Update cancelled!")

@ -1,5 +1,5 @@
from selenium import webdriver
import aiohttp
from aiohttp_retry import RetryClient, ExponentialRetry
import asyncio
from time import time
import re
@ -23,7 +23,7 @@ import concurrent.futures
import sys
import importlib.util
timeout = 15
timeout = 10
max_retries = 3
@ -50,9 +50,7 @@ def resource_path(relative_path, persistent=False):
"""
base_path = os.path.abspath(".")
total_path = os.path.join(base_path, relative_path)
if persistent:
return total_path
if os.path.exists(total_path):
if persistent or os.path.exists(total_path):
return total_path
else:
try:
@ -292,7 +290,7 @@ async def get_channels_by_online_search(names, callback):
wait = WebDriverWait(driver, timeout)
info_list = []
try:
retry_func(lambda: driver.get(pageUrl), name="online search")
retry_func(lambda: driver.get(pageUrl), name=f"online search:{name}")
search_box = retry_func(
lambda: wait.until(
EC.presence_of_element_located((By.XPATH, '//input[@type="text"]'))
@ -458,22 +456,26 @@ def get_results_from_soup(soup, name):
return results
async def get_speed(url, urlTimeout=timeout):
    """Measure the response time of ``url`` in milliseconds.

    Returns ``float("inf")`` when the request fails, times out, or the
    response status is not 200, so slow/broken urls sort last.

    :param url: the url to probe
    :param urlTimeout: per-request timeout budget in seconds
        (defaults to the module-level ``timeout``)
    """
    retry_options = ExponentialRetry(attempts=1, max_timeout=urlTimeout)
    retry_client = RetryClient(raise_for_status=False, retry_options=retry_options)
    total = float("inf")
    start = time()
    try:
        async with retry_client.get(url) as response:
            end = time()
            if response.status == 200:
                total = int(round((end - start) * 1000))
    except Exception:
        # Any network/timeout error means the url is unusable.
        total = float("inf")
    finally:
        # Close in finally so the underlying aiohttp session is not
        # leaked when retry_client.get() itself raises.
        await retry_client.close()
    return total
async def sort_urls_by_speed_and_resolution(infoList):