Minor fixes
Signed-off-by: anasty17 <[email protected]>
anasty17 committed Jan 18, 2024
1 parent 8bf7606 commit daeaebc
Showing 5 changed files with 53 additions and 37 deletions.
4 changes: 2 additions & 2 deletions bot/helper/ext_utils/bot_utils.py
@@ -134,8 +134,8 @@ def getSizeBytes(size):

 async def get_content_type(url):
     try:
-        async with ClientSession(trust_env=True) as session:
-            async with session.get(url, verify_ssl=False) as response:
+        async with ClientSession() as session:
+            async with session.get(url, allow_redirects=True, ssl=False) as response:
                 return response.headers.get("Content-Type")
     except:
         return None
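Note: the rewrite above swaps aiohttp's deprecated verify_ssl=False flag for ssl=False and passes allow_redirects=True explicitly, so the headers come from the final response after redirects. A minimal standalone sketch of the updated helper, runnable outside the bot (the example URL is illustrative):

import asyncio
from aiohttp import ClientSession


async def get_content_type(url):
    # ssl=False skips TLS certificate verification; allow_redirects=True
    # follows redirects so we read headers from the final response.
    try:
        async with ClientSession() as session:
            async with session.get(url, allow_redirects=True, ssl=False) as response:
                return response.headers.get("Content-Type")
    except Exception:
        return None


if __name__ == "__main__":
    print(asyncio.run(get_content_type("https://example.com")))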
16 changes: 12 additions & 4 deletions bot/helper/mirror_utils/download_utils/jd_download.py
@@ -91,9 +91,7 @@ async def add_jd_download(listener, path):
         return
 
     try:
-        await wait_for(
-            retry_function(jdownloader.device.jd.version), timeout=5
-        )
+        await wait_for(retry_function(jdownloader.device.jd.version), timeout=5)
     except:
         is_connected = await sync_to_async(jdownloader.jdconnect)
         if not is_connected:
@@ -151,12 +149,16 @@ async def add_jd_download(listener, path):

     if not online_packages and corrupted_packages and error:
         await listener.onDownloadError(error)
+        await retry_function(
+            jdownloader.device.linkgrabber.remove_links,
+            package_ids=corrupted_packages,
+        )
         return
 
     for pack in queued_downloads:
         online = pack.get("onlineCount", 1)
         if online == 0:
-            error = f"{pack.get('name', '')}. link: {listener.link}"
+            error = f"{pack.get('name', '')}"
             LOGGER.error(error)
             corrupted_packages.append(pack["uuid"])
             continue
@@ -202,6 +204,12 @@ async def add_jd_download(listener, path):
             name or "Download Not Added! Maybe some issues in jdownloader or site!"
         )
         await listener.onDownloadError(error)
+        if corrupted_packages or online_packages:
+            packages_to_remove = corrupted_packages + online_packages
+            await retry_function(
+                jdownloader.device.linkgrabber.remove_links,
+                package_ids=packages_to_remove,
+            )
         return
 
     jd_downloads[gid]["ids"] = online_packages
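Note: both added blocks clean up JDownloader's linkgrabber when a download cannot proceed, retrying the remove_links call since My.JDownloader requests can fail transiently. The device API names come from the diff; the retry helper below is an assumed sketch of the pattern, not the bot's exact implementation:

import asyncio


async def retry_function(func, *args, max_retries=3, delay=1, **kwargs):
    # Run a blocking My.JDownloader call off the event loop and retry it
    # a few times before giving up (retry count and delay are illustrative).
    for attempt in range(1, max_retries + 1):
        try:
            return await asyncio.to_thread(func, *args, **kwargs)
        except Exception:
            if attempt == max_retries:
                raise
            await asyncio.sleep(delay)


# Usage mirroring the diff: drop every package that failed to resolve.
# await retry_function(
#     jdownloader.device.linkgrabber.remove_links,
#     package_ids=corrupted_packages + online_packages,
# )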
62 changes: 35 additions & 27 deletions bot/modules/rss.py
@@ -93,7 +93,7 @@ async def rssSub(_, message, pre_event):
         if feed_link.startswith(("-inf", "-exf", "-c")):
             await sendMessage(
                 message,
-                f"Wrong input in line {index}! Re-add only the mentioned line correctly! Read the example!",
+                f"Wrong input in line {index}! Add Title! Read the example!",
             )
             continue
         inf_lists = []
@@ -120,8 +120,8 @@ async def rssSub(_, message, pre_event):
         exf = None
         cmd = None
         try:
-            async with ClientSession(trust_env=True) as session:
-                async with session.get(feed_link) as res:
+            async with ClientSession() as session:
+                async with session.get(feed_link, ssl=False) as res:
                     html = await res.text()
             rss_d = feedparse(html)
             last_title = rss_d.entries[0]["title"]
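Note: this fetch-then-parse pattern recurs in rssGet and rssMonitor below: download the feed body with aiohttp (certificate checks disabled), then hand the text to feedparser, which works purely offline. A self-contained sketch, assuming feedparser is installed (the feed URL is illustrative):

import asyncio
from aiohttp import ClientSession
from feedparser import parse as feedparse


async def last_feed_title(feed_link):
    # Fetch the raw feed text ourselves so aiohttp controls TLS behavior,
    # then let feedparser parse the already-downloaded string.
    async with ClientSession() as session:
        async with session.get(feed_link, ssl=False) as res:
            html = await res.text()
    rss_d = feedparse(html)
    return rss_d.entries[0]["title"]


# print(asyncio.run(last_feed_title("https://example.com/rss.xml")))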
@@ -173,17 +173,17 @@ async def rssSub(_, message, pre_event):
             await sendMessage(message, emsg + "\nError: " + str(e))
         except Exception as e:
             await sendMessage(message, str(e))
-    if DATABASE_URL:
-        await DbManager().rss_update(user_id)
     if msg:
+        if DATABASE_URL and rss_dict[user_id]:
+            await DbManager().rss_update(user_id)
         await sendMessage(message, msg)
-    is_sudo = await CustomFilters.sudo("", message)
-    if scheduler.state == 2:
-        scheduler.resume()
-    elif is_sudo and not scheduler.running:
-        addJob()
-        scheduler.start()
     await updateRssMenu(pre_event)
+    is_sudo = await CustomFilters.sudo("", message)
+    if scheduler.state == 2:
+        scheduler.resume()
+    elif is_sudo and not scheduler.running:
+        addJob()
+        scheduler.start()


async def getUserId(title):
@@ -240,12 +240,12 @@ async def rssUpdate(_, message, pre_event, state):
             await DbManager().rss_delete(user_id)
         if not rss_dict:
             await DbManager().trunc_table("rss")
-    LOGGER.info(f"Rss link with Title(s): {updated} has been {state}d!")
-    await sendMessage(
-        message, f"Rss links with Title(s): <code>{updated}</code> has been {state}d!"
-    )
-    if DATABASE_URL and rss_dict.get(user_id):
-        await DbManager().rss_update(user_id)
+    if updated:
+        LOGGER.info(f"Rss link with Title(s): {updated} has been {state}d!")
+        await sendMessage(
+            message,
+            f"Rss links with Title(s): <code>{updated}</code> has been {state}d!",
+        )
+        if DATABASE_URL and rss_dict.get(user_id):
+            await DbManager().rss_update(user_id)
     await updateRssMenu(pre_event)


@@ -259,7 +261,7 @@ async def rssList(query, start, all_users=False):
     index = 0
     for titles in list(rss_dict.values()):
         for index, (title, data) in enumerate(
-            list(titles.items())[start: 5 + start]
+            list(titles.items())[start : 5 + start]
         ):
             list_feed += f"\n\n<b>Title:</b> <code>{title}</code>\n"
             list_feed += f"<b>Feed Url:</b> <code>{data['link']}</code>\n"
@@ -275,7 +277,7 @@ async def rssList(query, start, all_users=False):
         list_feed = f"<b>Your subscriptions | Page: {int(start / 5)} </b>"
     async with rss_dict_lock:
         keysCount = len(rss_dict.get(user_id, {}).keys())
-        for title, data in list(rss_dict[user_id].items())[start: 5 + start]:
+        for title, data in list(rss_dict[user_id].items())[start : 5 + start]:
             list_feed += f"\n\n<b>Title:</b> <code>{title}</code>\n<b>Feed Url: </b><code>{data['link']}</code>\n"
             list_feed += f"<b>Command:</b> <code>{data['command']}</code>\n"
             list_feed += f"<b>Inf:</b> <code>{data['inf']}</code>\n"
@@ -285,7 +287,9 @@ async def rssList(query, start, all_users=False):
     buttons.ibutton("Close", f"rss close {user_id}")
     if keysCount > 5:
         for x in range(0, keysCount, 5):
-            buttons.ibutton(f"{int(x / 5)}", f"rss list {user_id} {x}", position="footer")
+            buttons.ibutton(
+                f"{int(x / 5)}", f"rss list {user_id} {x}", position="footer"
+            )
     button = buttons.build_menu(2)
     if query.message.text.html == list_feed:
         return
@@ -312,8 +316,8 @@ async def rssGet(_, message, pre_event):
         msg = await sendMessage(
             message, f"Getting the last <b>{count}</b> item(s) from {title}"
         )
-        async with ClientSession(trust_env=True) as session:
-            async with session.get(data["link"]) as res:
+        async with ClientSession() as session:
+            async with session.get(data["link"], ssl=False) as res:
                 html = await res.text()
         rss_d = feedparse(html)
         item_info = ""
@@ -350,6 +354,7 @@ async def rssEdit(_, message, pre_event):
     user_id = message.from_user.id
     handler_dict[user_id] = False
     items = message.text.split("\n")
+    updated = False
     for item in items:
         args = item.split()
         title = args[0].strip()
@@ -362,6 +367,7 @@ async def rssEdit(_, message, pre_event):
         elif not rss_dict[user_id].get(title, False):
             await sendMessage(message, "Enter a valid title. Title not found!")
             continue
+        updated = True
         inf_lists = []
         exf_lists = []
         arg = item.split(" -c ", 1)
@@ -389,7 +395,7 @@ async def rssEdit(_, message, pre_event):
                 y = x.split(" or ")
                 exf_lists.append(y)
             rss_dict[user_id][title]["exf"] = exf_lists
-    if DATABASE_URL:
+    if DATABASE_URL and updated:
         await DbManager().rss_update(user_id)
     await updateRssMenu(pre_event)

@@ -634,8 +640,8 @@ async def rssMonitor():
         try:
             if data["paused"]:
                 continue
-            async with ClientSession(trust_env=True) as session:
-                async with session.get(data["link"]) as res:
+            async with ClientSession() as session:
+                async with session.get(data["link"], ssl=False) as res:
                     html = await res.text()
             rss_d = feedparse(html)
             try:
@@ -668,12 +674,14 @@ async def rssMonitor():
                         break
                 parse = True
                 for flist in data["inf"]:
-                    if all(x not in item_title.lower() for x in flist):
+                    if all(x not in item_title for x in flist):
                         parse = False
+                        feed_count += 1
                         break
                 if not parse:
                     continue
                 for flist in data["exf"]:
-                    if any(x in item_title.lower() for x in flist):
+                    if any(x in item_title for x in flist):
                         parse = False
+                        feed_count += 1
                         break
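Note: dropping the .lower() calls makes the inf/exf filters case-sensitive, and the added feed_count += 1 lines advance the entry counter even when an item is filtered out. A self-contained sketch of the filter logic after this commit (sample titles and filters are illustrative):

def should_parse(item_title, inf, exf):
    # inf: a list of groups; within a group, terms are alternatives (OR),
    # and every group must match (AND). Matching is now case-sensitive.
    for flist in inf:
        if all(x not in item_title for x in flist):
            return False
    # exf: reject the item if any exclude term appears in the title.
    for flist in exf:
        if any(x in item_title for x in flist):
            return False
    return True


# "1080p" passes, but "1080P" no longer matches after this change:
print(should_parse("Show S01E01 1080p WEB", [["1080p", "2160p"]], [["HDCAM"]]))  # True
print(should_parse("Show S01E01 1080P WEB", [["1080p", "2160p"]], [["HDCAM"]]))  # False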
4 changes: 2 additions & 2 deletions bot/modules/torrent_search.py
@@ -37,7 +37,7 @@ async def initiate_search_tools():
     if SEARCH_API_LINK := config_dict["SEARCH_API_LINK"]:
         global SITES
         try:
-            async with ClientSession(trust_env=True) as c:
+            async with ClientSession() as c:
                 async with c.get(f"{SEARCH_API_LINK}/api/v1/sites") as res:
                     data = await res.json()
                     SITES = {
@@ -76,7 +76,7 @@ async def _search(key, site, message, method):
f"{SEARCH_API_LINK}/api/v1/recent?site={site}&limit={SEARCH_LIMIT}"
)
try:
async with ClientSession(trust_env=True) as c:
async with ClientSession() as c:
async with c.get(api) as res:
search_results = await res.json()
if "error" in search_results or search_results["total"] == 0:
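Note: the only change here is dropping trust_env=True. With that flag, aiohttp picks up HTTP_PROXY/HTTPS_PROXY environment variables and netrc credentials; without it, sessions ignore them. A minimal sketch of the resulting call, using an endpoint in the shape the diff uses (the base URL is illustrative):

import asyncio
from aiohttp import ClientSession


async def fetch_sites(search_api_link):
    # No trust_env: proxy environment variables are not consulted.
    async with ClientSession() as c:
        async with c.get(f"{search_api_link}/api/v1/sites") as res:
            return await res.json()


# print(asyncio.run(fetch_sites("http://localhost:8000")))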
4 changes: 2 additions & 2 deletions myjd/myjdapi.py
@@ -1170,7 +1170,7 @@ def request_api(self, path, http_method="GET", params=None, action=None, api=None):
             )
         ]
         query = query[0] + "&".join(query[1:])
-        encrypted_response = get(api + query, timeout=3, verify=False)
+        encrypted_response = get(api + query, timeout=2, verify=False)
     else:
         params_request = []
         if params is not None:
@@ -1198,7 +1198,7 @@ def request_api(self, path, http_method="GET", params=None, action=None, api=None):
                 request_url,
                 headers={"Content-Type": "application/aesjson-jd; charset=utf-8"},
                 data=encrypted_data,
-                timeout=3,
+                timeout=2,
                 verify=False,
             )
         except RequestException as e:
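Note: both hunks tighten the My.JDownloader HTTP timeout from 3 to 2 seconds while keeping verify=False. In requests, a scalar timeout bounds the connect and read phases separately. A sketch of the GET-side call shape (the endpoint and query string are illustrative, not the library's exact request):

from requests import get
from requests.exceptions import RequestException

API = "https://api.jdownloader.org"

try:
    # timeout=2 applies to both connect and read; verify=False skips
    # TLS certificate checks, matching the diff.
    response = get(f"{API}/my/connect?email=user%40example.com", timeout=2, verify=False)
    response.raise_for_status()
except RequestException as e:
    print(f"My.JDownloader request failed: {e}")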
