chore:timeout

commit 316c72adae
parent b2bb8be774

Changed files:
  fofa
  online_search
  requests_custom
  subscribe
  utils
fofa
@@ -13,7 +13,7 @@ from proxy import get_proxy, get_proxy_next
 from requests_custom.utils import get_source_requests, close_session
 
 config = get_config()
-timeout = 10
+timeout = 30
 
 
 def get_fofa_urls_from_region_list():
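The fofa module's request budget goes from 10 s to 30 s. How the constant is consumed is not part of this hunk; a minimal sketch, assuming fofa forwards it to the imported request helper (region_url and fetch_region are hypothetical placeholders):

# Sketch only: assumes get_source_requests accepts a timeout keyword,
# as introduced in the requests_custom hunk further down.
from requests_custom.utils import get_source_requests

timeout = 30  # module-level budget, as set in this commit

def fetch_region(region_url):
    # hypothetical helper: fetch one FOFA region page with the module budget
    return get_source_requests(region_url, proxy=None, timeout=timeout)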
online_search
@@ -65,7 +65,8 @@ async def get_channels_by_online_search(names, callback):
     Get the channels by online search
     """
     channels = {}
-    pageUrl = await use_accessible_url(callback)
+    # pageUrl = await use_accessible_url(callback)
+    pageUrl = "http://tonkiang.us/"
     if not pageUrl:
         return channels
     proxy = None
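This hunk bypasses the accessible-URL probe and pins the search page to http://tonkiang.us/. For comparison, a hedged one-line alternative that keeps the probe and only falls back to the pinned URL when discovery fails (assuming use_accessible_url returns a falsy value on failure; this is not what the commit does):

# Sketch only, not the committed code: probe first, fall back to the fixed page.
pageUrl = await use_accessible_url(callback) or "http://tonkiang.us/"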
requests_custom
@@ -19,12 +19,12 @@ headers = {
 session = requests.Session()
 
 
-def get_source_requests(url, proxy=None):
+def get_source_requests(url, proxy=None, timeout=30):
     """
     Get the source by requests
     """
     proxies = {"http": proxy}
-    response = session.get(url, headers=headers, proxies=proxies, timeout=30)
+    response = session.get(url, headers=headers, proxies=proxies, timeout=timeout)
     source = re.sub(
         r"<!--.*?-->",
         "",
@@ -34,11 +34,11 @@ def get_source_requests(url, proxy=None):
     return source
 
 
-def get_soup_requests(url, proxy=None):
+def get_soup_requests(url, proxy=None, timeout=30):
     """
     Get the soup by requests
     """
-    source = get_source_requests(url, proxy)
+    source = get_source_requests(url, proxy, timeout)
     soup = BeautifulSoup(source, "html.parser")
     return soup
 
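Taken together, these two hunks turn the hard-coded 30-second limit inside session.get into a parameter: get_source_requests and get_soup_requests now accept timeout (default 30) and forward it to requests, so existing call sites keep working while callers can tighten or loosen the budget per request. A usage sketch under that assumption (URLs are placeholders):

# Sketch only: illustrates the new keyword on both helpers.
from requests_custom.utils import get_source_requests, get_soup_requests

html = get_source_requests("http://tonkiang.us/", timeout=30)            # default-sized budget
soup = get_soup_requests("http://tonkiang.us/", proxy=None, timeout=15)  # tighter per-call budget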
subscribe
@@ -10,7 +10,7 @@ from concurrent.futures import ThreadPoolExecutor
 
 
 config = get_config()
-timeout = 10
+timeout = 30
 
 
 async def get_channels_by_subscribe_urls(callback):
utils
@@ -4,7 +4,7 @@ from selenium.webdriver.support import expected_conditions as EC
 from selenium.common.exceptions import TimeoutException
 
 max_retries = 3
-timeout = 10
+timeout = 15
 
 
 def retry_func(func, retries=max_retries + 1, name=""):
@@ -13,13 +13,15 @@ def retry_func(func, retries=max_retries + 1, name=""):
     """
     for i in range(retries):
         try:
-            sleep(3)
+            sleep(1)
             return func()
         except Exception as e:
             if name and i < retries - 1:
                 print(f"Failed to connect to the {name}. Retrying {i+1}...")
             elif i == retries - 1:
-                raise Exception(f"Failed to connect to the {name} reached the maximum retries.")
+                raise Exception(
+                    f"Failed to connect to the {name} reached the maximum retries."
+                )
     raise Exception(f"Failed to connect to the {name} reached the maximum retries.")
 
 
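Apart from the shorter pause (sleep(1) instead of sleep(3)) and the reflowed raise, retry_func's behaviour is unchanged: it calls the given zero-argument callable up to max_retries + 1 times and raises once the retries are exhausted. A usage sketch, with the wrapped fetch and its URL as hypothetical examples:

# Sketch only: retry a flaky fetch through the existing retry_func signature.
from requests_custom.utils import get_source_requests

def fetch():
    return get_source_requests("http://tonkiang.us/", timeout=15)

source = retry_func(fetch, name="online search page")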
@@ -6,7 +6,7 @@ from utils.config import get_config
 
 
 config = get_config()
-timeout = 10
+timeout = 15
 
 
 async def get_speed(url, timeout=timeout, proxy=None):
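Because get_speed binds its timeout keyword default to the module constant, raising the constant from 10 to 15 changes the default for every caller that does not override it. A call sketch under that assumption (the stream URL is a placeholder; the awaits must run inside an async function):

# Sketch only: default vs. explicit per-call timeout.
speed = await get_speed("http://example.com/stream.m3u8")            # new 15 s default
fast = await get_speed("http://example.com/stream.m3u8", timeout=5)  # explicit override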
@@ -10,7 +10,6 @@ import re
 from bs4 import BeautifulSoup
 
 config = get_config()
-timeout = 10
 
 
 def get_pbar_remaining(pbar, start_time):
@@ -69,6 +68,7 @@ def filter_by_date(data):
     recent_data.extend(unrecent_data[: config.urls_limit - len(recent_data)])
     return recent_data
 
+
 def get_soup(source):
     """
     Get soup from source
@@ -82,6 +82,7 @@ def get_soup(source):
     soup = BeautifulSoup(source, "html.parser")
     return soup
 
+
 def get_total_urls_from_info_list(infoList):
     """
     Get the total urls from info list