diff --git a/bot/helper/ext_utils/bot_utils.py b/bot/helper/ext_utils/bot_utils.py
index b266faa0fb1..75fe1ac52ef 100644
--- a/bot/helper/ext_utils/bot_utils.py
+++ b/bot/helper/ext_utils/bot_utils.py
@@ -134,8 +134,8 @@ def getSizeBytes(size):
async def get_content_type(url):
try:
- async with ClientSession(trust_env=True) as session:
- async with session.get(url, verify_ssl=False) as response:
+ async with ClientSession() as session:
+ async with session.get(url, allow_redirects=True, ssl=False) as response:
return response.headers.get("Content-Type")
except:
return None
diff --git a/bot/helper/mirror_utils/download_utils/jd_download.py b/bot/helper/mirror_utils/download_utils/jd_download.py
index 105a9d8a12d..ca9f400777a 100644
--- a/bot/helper/mirror_utils/download_utils/jd_download.py
+++ b/bot/helper/mirror_utils/download_utils/jd_download.py
@@ -91,9 +91,7 @@ async def add_jd_download(listener, path):
return
try:
- await wait_for(
- retry_function(jdownloader.device.jd.version), timeout=5
- )
+ await wait_for(retry_function(jdownloader.device.jd.version), timeout=5)
except:
is_connected = await sync_to_async(jdownloader.jdconnect)
if not is_connected:
@@ -151,12 +149,16 @@ async def add_jd_download(listener, path):
if not online_packages and corrupted_packages and error:
await listener.onDownloadError(error)
+ await retry_function(
+ jdownloader.device.linkgrabber.remove_links,
+ package_ids=corrupted_packages,
+ )
return
for pack in queued_downloads:
online = pack.get("onlineCount", 1)
if online == 0:
- error = f"{pack.get('name', '')}. link: {listener.link}"
+ error = f"{pack.get('name', '')}"
LOGGER.error(error)
corrupted_packages.append(pack["uuid"])
continue
@@ -202,6 +204,12 @@ async def add_jd_download(listener, path):
name or "Download Not Added! Maybe some issues in jdownloader or site!"
)
await listener.onDownloadError(error)
+ if corrupted_packages or online_packages:
+ packages_to_remove = corrupted_packages + online_packages
+ await retry_function(
+ jdownloader.device.linkgrabber.remove_links,
+ package_ids=packages_to_remove,
+ )
return
jd_downloads[gid]["ids"] = online_packages
diff --git a/bot/modules/rss.py b/bot/modules/rss.py
index e71dda20d17..18474aa7d75 100644
--- a/bot/modules/rss.py
+++ b/bot/modules/rss.py
@@ -93,7 +93,7 @@ async def rssSub(_, message, pre_event):
if feed_link.startswith(("-inf", "-exf", "-c")):
await sendMessage(
message,
- f"Wrong input in line {index}! Re-add only the mentioned line correctly! Read the example!",
+ f"Wrong input in line {index}! Add Title! Read the example!",
)
continue
inf_lists = []
@@ -120,8 +120,8 @@ async def rssSub(_, message, pre_event):
exf = None
cmd = None
try:
- async with ClientSession(trust_env=True) as session:
- async with session.get(feed_link) as res:
+ async with ClientSession() as session:
+ async with session.get(feed_link, ssl=False) as res:
html = await res.text()
rss_d = feedparse(html)
last_title = rss_d.entries[0]["title"]
@@ -173,17 +173,17 @@ async def rssSub(_, message, pre_event):
await sendMessage(message, emsg + "\nError: " + str(e))
except Exception as e:
await sendMessage(message, str(e))
- if DATABASE_URL:
- await DbManager().rss_update(user_id)
if msg:
+ if DATABASE_URL and rss_dict[user_id]:
+ await DbManager().rss_update(user_id)
await sendMessage(message, msg)
+ is_sudo = await CustomFilters.sudo("", message)
+ if scheduler.state == 2:
+ scheduler.resume()
+ elif is_sudo and not scheduler.running:
+ addJob()
+ scheduler.start()
await updateRssMenu(pre_event)
- is_sudo = await CustomFilters.sudo("", message)
- if scheduler.state == 2:
- scheduler.resume()
- elif is_sudo and not scheduler.running:
- addJob()
- scheduler.start()
async def getUserId(title):
@@ -240,12 +240,14 @@ async def rssUpdate(_, message, pre_event, state):
await DbManager().rss_delete(user_id)
if not rss_dict:
await DbManager().trunc_table("rss")
- LOGGER.info(f"Rss link with Title(s): {updated} has been {state}d!")
- await sendMessage(
-        message, f"Rss links with Title(s): {updated} has been {state}d!"
- )
- if DATABASE_URL and rss_dict.get(user_id):
- await DbManager().rss_update(user_id)
+ if updated:
+ LOGGER.info(f"Rss link with Title(s): {updated} has been {state}d!")
+ await sendMessage(
+ message,
+                f"Rss links with Title(s): {updated} has been {state}d!",
+ )
+ if DATABASE_URL and rss_dict.get(user_id):
+ await DbManager().rss_update(user_id)
await updateRssMenu(pre_event)
@@ -259,7 +261,7 @@ async def rssList(query, start, all_users=False):
index = 0
for titles in list(rss_dict.values()):
for index, (title, data) in enumerate(
- list(titles.items())[start: 5 + start]
+ list(titles.items())[start : 5 + start]
):
                 list_feed += f"\n\nTitle: {title}\n"
                 list_feed += f"Feed Url: {data['link']}\n"
@@ -275,7 +277,7 @@ async def rssList(query, start, all_users=False):
list_feed = f"Your subscriptions | Page: {int(start / 5)} "
async with rss_dict_lock:
keysCount = len(rss_dict.get(user_id, {}).keys())
- for title, data in list(rss_dict[user_id].items())[start: 5 + start]:
+ for title, data in list(rss_dict[user_id].items())[start : 5 + start]:
             list_feed += f"\n\nTitle: {title}\nFeed Url: {data['link']}\n"
             list_feed += f"Command: {data['command']}\n"
             list_feed += f"Inf: {data['inf']}\n"
@@ -285,7 +287,9 @@ async def rssList(query, start, all_users=False):
buttons.ibutton("Close", f"rss close {user_id}")
if keysCount > 5:
for x in range(0, keysCount, 5):
- buttons.ibutton(f"{int(x / 5)}", f"rss list {user_id} {x}", position="footer")
+ buttons.ibutton(
+ f"{int(x / 5)}", f"rss list {user_id} {x}", position="footer"
+ )
button = buttons.build_menu(2)
if query.message.text.html == list_feed:
return
@@ -312,8 +316,8 @@ async def rssGet(_, message, pre_event):
msg = await sendMessage(
message, f"Getting the last {count} item(s) from {title}"
)
- async with ClientSession(trust_env=True) as session:
- async with session.get(data["link"]) as res:
+ async with ClientSession() as session:
+ async with session.get(data["link"], ssl=False) as res:
html = await res.text()
rss_d = feedparse(html)
item_info = ""
@@ -350,6 +354,7 @@ async def rssEdit(_, message, pre_event):
user_id = message.from_user.id
handler_dict[user_id] = False
items = message.text.split("\n")
+ updated = False
for item in items:
args = item.split()
title = args[0].strip()
@@ -362,6 +367,7 @@ async def rssEdit(_, message, pre_event):
elif not rss_dict[user_id].get(title, False):
await sendMessage(message, "Enter a valid title. Title not found!")
continue
+ updated = True
inf_lists = []
exf_lists = []
arg = item.split(" -c ", 1)
@@ -389,7 +395,7 @@ async def rssEdit(_, message, pre_event):
y = x.split(" or ")
exf_lists.append(y)
rss_dict[user_id][title]["exf"] = exf_lists
- if DATABASE_URL:
+ if DATABASE_URL and updated:
await DbManager().rss_update(user_id)
await updateRssMenu(pre_event)
@@ -634,8 +640,8 @@ async def rssMonitor():
try:
if data["paused"]:
continue
- async with ClientSession(trust_env=True) as session:
- async with session.get(data["link"]) as res:
+ async with ClientSession() as session:
+ async with session.get(data["link"], ssl=False) as res:
html = await res.text()
rss_d = feedparse(html)
try:
@@ -668,12 +674,14 @@ async def rssMonitor():
break
parse = True
for flist in data["inf"]:
- if all(x not in item_title.lower() for x in flist):
+ if all(x not in item_title for x in flist):
parse = False
feed_count += 1
break
+ if not parse:
+ continue
for flist in data["exf"]:
- if any(x in item_title.lower() for x in flist):
+ if any(x in item_title for x in flist):
parse = False
feed_count += 1
break
diff --git a/bot/modules/torrent_search.py b/bot/modules/torrent_search.py
index 3435c8a24d5..409ec7acc1c 100644
--- a/bot/modules/torrent_search.py
+++ b/bot/modules/torrent_search.py
@@ -37,7 +37,7 @@ async def initiate_search_tools():
if SEARCH_API_LINK := config_dict["SEARCH_API_LINK"]:
global SITES
try:
- async with ClientSession(trust_env=True) as c:
+ async with ClientSession() as c:
async with c.get(f"{SEARCH_API_LINK}/api/v1/sites") as res:
data = await res.json()
SITES = {
@@ -76,7 +76,7 @@ async def _search(key, site, message, method):
f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}"
)
try:
- async with ClientSession(trust_env=True) as c:
+ async with ClientSession() as c:
async with c.get(api) as res:
search_results = await res.json()
if "error" in search_results or search_results["total"] == 0:
diff --git a/myjd/myjdapi.py b/myjd/myjdapi.py
index cd5b6755d40..a79f1a57e01 100644
--- a/myjd/myjdapi.py
+++ b/myjd/myjdapi.py
@@ -1170,7 +1170,7 @@ def request_api(self, path, http_method="GET", params=None, action=None, api=Non
)
]
query = query[0] + "&".join(query[1:])
- encrypted_response = get(api + query, timeout=3, verify=False)
+ encrypted_response = get(api + query, timeout=2, verify=False)
else:
params_request = []
if params is not None:
@@ -1198,7 +1198,7 @@ def request_api(self, path, http_method="GET", params=None, action=None, api=Non
request_url,
headers={"Content-Type": "application/aesjson-jd; charset=utf-8"},
data=encrypted_data,
- timeout=3,
+ timeout=2,
verify=False,
)
except RequestException as e: