refactor
parent bd6f45fb3d
commit e7722f104b
@@ -46,7 +46,7 @@ async def get_channels_by_fofa(callback):
     proxy = await get_proxy(fofa_urls[0], best=True, with_test=True)
     driver = setup_driver(proxy)

-    async def process_fofa_channels(fofa_url, fofa_urls_len):
+    def process_fofa_channels(fofa_url, fofa_urls_len):
         try:
             retry_func(lambda: driver.get(fofa_url), name=fofa_url)
             fofa_source = re.sub(r"<!--.*?-->", "", driver.page_source, flags=re.DOTALL)
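The hunk above drops the async keyword from the nested Selenium worker. One common way such a blocking worker is then driven from the surrounding async function is a thread pool executor; the sketch below only illustrates that pattern, and the executor wiring, worker body, and URLs are invented here rather than taken from the commit.

import asyncio
from concurrent.futures import ThreadPoolExecutor

def process_fofa_channels(fofa_url, fofa_urls_len):
    # hypothetical stand-in for the real Selenium work; deliberately blocking
    return f"processed {fofa_url} (1 of {fofa_urls_len})"

async def get_channels_demo(urls):
    loop = asyncio.get_running_loop()
    with ThreadPoolExecutor(max_workers=5) as pool:
        tasks = [
            loop.run_in_executor(pool, process_fofa_channels, url, len(urls))
            for url in urls
        ]
        return await asyncio.gather(*tasks)

print(asyncio.run(get_channels_demo(["https://fofa.example/a", "https://fofa.example/b"])))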
main.py
@@ -86,7 +86,6 @@ class UpdateSource:
         self.total = len(channel_names)
         await self.visit_page(channel_names)
         self.tasks = []
-        self.process_channel()
         self.channel_data = append_all_method_data(
             self.channel_items.items(),
             self.channel_data,
@@ -69,7 +69,7 @@ async def get_channels_by_online_search(names, callback):
     start_time = time()
     driver = setup_driver(proxy)

-    async def process_channel_by_online_search(name):
+    def process_channel_by_online_search(name):
         info_list = []
         try:
             retry_func(lambda: driver.get(pageUrl), name=f"online search:{name}")
@@ -116,6 +116,7 @@ async def get_channels_by_online_search(names, callback):
                 )
                 if next_page_link:
                     search_submit(driver, name)
+                    retries += 1
                     continue
                 for result in results:
                     url, date, resolution = result
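For context on the added retries += 1 above: incrementing the counter before continue is what keeps a retry loop bounded. A minimal, self-contained sketch of that shape (MAX_RETRIES and fetch_results are hypothetical names, not from the project):

MAX_RETRIES = 3

def fetch_results(attempt):
    # hypothetical fetch: pretend the first two attempts only find a "next page" link
    return ([], True) if attempt < 2 else ([("http://example.com/live.m3u8", None, None)], False)

retries = 0
results = []
while retries < MAX_RETRIES:
    results, next_page_link = fetch_results(retries)
    if next_page_link:
        retries += 1  # without this increment the loop would spin on the same page forever
        continue
    break

for url, date, resolution in results:
    print(url, date, resolution)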
@@ -239,11 +239,6 @@ def append_all_method_data(
                 len(online_search_result.get(formatName, [])),
             )
             total_channel_data_len = len(data.get(cate, {}).get(name, []))
-            print(
-                name,
-                "total num:",
-                total_channel_data_len,
-            )
             if total_channel_data_len == 0:
                 data = append_data_to_info_data(
                     data,
@@ -251,6 +246,11 @@ def append_all_method_data(
                     name,
                     [(url, None, None) for url in old_urls],
                 )
+            print(
+                name,
+                "total num:",
+                len(data.get(cate, {}).get(name, [])),
+            )
     return data


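The [(url, None, None) for url in old_urls] line above pads bare URLs into the (url, date, resolution) shape used elsewhere in the diff; a tiny standalone illustration (URLs made up):

old_urls = ["http://example.com/a.m3u8", "http://example.com/b.m3u8"]
fallback = [(url, None, None) for url in old_urls]
print(fallback)
# [('http://example.com/a.m3u8', None, None), ('http://example.com/b.m3u8', None, None)]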
@@ -261,25 +261,26 @@ async def sort_channel_list(semaphore, cate, name, info_list, callback):
     async with semaphore:
         data = []
         try:
-            sorted_data = await sort_urls_by_speed_and_resolution(info_list)
-            if sorted_data:
-                for (
-                    url,
-                    date,
-                    resolution,
-                ), response_time in sorted_data:
-                    logging.info(
-                        f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time}ms"
-                    )
-                data = [
-                    (url, date, resolution)
-                    for (url, date, resolution), _ in sorted_data
-                ]
+            if info_list:
+                sorted_data = await sort_urls_by_speed_and_resolution(info_list)
+                if sorted_data:
+                    for (
+                        url,
+                        date,
+                        resolution,
+                    ), response_time in sorted_data:
+                        logging.info(
+                            f"Name: {name}, URL: {url}, Date: {date}, Resolution: {resolution}, Response Time: {response_time}ms"
+                        )
+                    data = [
+                        (url, date, resolution)
+                        for (url, date, resolution), _ in sorted_data
+                    ]
         except Exception as e:
             logging.error(f"Error: {e}")
         finally:
             callback()
-            return {cate, name, data}
+            return {cate: cate, name: name, data: data}


 def write_channel_to_file(items, data, callback):
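On the return-value change in the last hunk: {cate, name, data} is a set literal, while {cate: cate, name: name, data: data} is a dict keyed by the runtime values of those variables (a dict with fixed keys would use string literals instead). A small illustration with made-up hashable values; note that either form requires hashable values, so a list could not be used directly as a member or key:

cate, name, data = "movies", "CCTV-1", "url-list"   # strings so everything is hashable

as_set = {cate, name, data}                                   # set of the three values
as_value_keyed = {cate: cate, name: name, data: data}         # keys are the variables' values
as_string_keyed = {"cate": cate, "name": name, "data": data}  # keys are fixed strings

print(as_value_keyed["CCTV-1"])   # -> CCTV-1
print(as_string_keyed["name"])    # -> CCTV-1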