Skip to content

Commit

Permalink
- Added guidance for "Reject Blocklisted Torrent Hashes While Grabbing"
Browse files Browse the repository at this point in the history
- Added more debug logs
  • Loading branch information
ManiMatter committed Mar 2, 2024
1 parent 56dd1d1 commit 17370a4
Show file tree
Hide file tree
Showing 9 changed files with 38 additions and 30 deletions.
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,9 @@ You may run this locally by launching main.py, or by pulling the docker image.
You can find a sample docker-compose.yml in the docker folder.

## Dependencies & Hints
Use Sonarr v4 & Radarr v5 (currently 'nightly' tag instead of 'latest'), else certain features may not work correctly.
qBittorrent is recommended but not required. If you don't use qBittorrent, certain features won't work (such as tag-protection)
If you see strange errors such as "found 10 / 3 times", consider turning on the setting "Reject Blocklisted Torrent Hashes While Grabbing" on indexer-level (available in the nightly versions of sonarr/radarr (Untested: setting by now may also exist in readarr & lidarr))
- Use Sonarr v4 & Radarr v5 (currently 'nightly' tag instead of 'latest'), else certain features may not work correctly.
- qBittorrent is recommended but not required. If you don't use qBittorrent, certain features won't work (such as tag-protection).
- If you see strange errors such as "found 10 / 3 times", consider turning on the setting "Reject Blocklisted Torrent Hashes While Grabbing" on indexer-level (available in the nightly versions of the *arr apps)

## Getting started
There are two ways to run this:
Expand Down
2 changes: 1 addition & 1 deletion src/remove_failed.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ async def remove_failed(settings_dict, BASE_URL, API_KEY, NAME, deleted_download
try:
failType = 'failed'
queue = await get_queue(BASE_URL, API_KEY)
if not queue: return 0
logger.debug('remove_failed/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
# Find items affected
affectedItems = []
for queueItem in queue['records']:
Expand Down
2 changes: 1 addition & 1 deletion src/remove_metadata_missing.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ async def remove_metadata_missing(settings_dict, BASE_URL, API_KEY, NAME, delete
try:
failType = 'missing metadata'
queue = await get_queue(BASE_URL, API_KEY)
if not queue: return 0
logger.debug('remove_metadata_missing/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
# Find items affected
affectedItems = []
for queueItem in queue['records']:
Expand Down
2 changes: 1 addition & 1 deletion src/remove_missing_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ async def remove_missing_files(settings_dict, BASE_URL, API_KEY, NAME, deleted_d
try:
failType = 'missing files'
queue = await get_queue(BASE_URL, API_KEY)
if not queue: return 0
logger.debug('remove_missing_files/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
# Find items affected
affectedItems = []
for queueItem in queue['records']:
Expand Down
2 changes: 1 addition & 1 deletion src/remove_orphans.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ async def remove_orphans(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa
try:
failType = 'orphan'
full_queue = await get_queue(BASE_URL, API_KEY, params = {full_queue_param: True})
if not full_queue: return 0 # By now the queue may be empty
queue = await get_queue(BASE_URL, API_KEY)
logger.debug('remove_orphans/full queue IN: %s', str(full_queue))
if not full_queue: return 0 # By now the queue may be empty
logger.debug('remove_orphans/queue IN: %s', str(queue))

# Find items affected
Expand Down
2 changes: 1 addition & 1 deletion src/remove_slow.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ async def remove_slow(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloads,
failType = 'slow'
queue = await get_queue(BASE_URL, API_KEY)
logger.debug('remove_slow/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
if not queue: return 0
# Find items affected
affectedItems = []
alreadyCheckedDownloadIDs = []
Expand Down
2 changes: 1 addition & 1 deletion src/remove_stalled.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ async def remove_stalled(settings_dict, BASE_URL, API_KEY, NAME, deleted_downloa
failType = 'stalled'
queue = await get_queue(BASE_URL, API_KEY)
logger.debug('remove_stalled/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
if not queue: return 0
# Find items affected
affectedItems = []
for queueItem in queue['records']:
Expand Down
2 changes: 1 addition & 1 deletion src/remove_unmonitored.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ async def remove_unmonitored(settings_dict, BASE_URL, API_KEY, NAME, deleted_dow
failType = 'unmonitored'
queue = await get_queue(BASE_URL, API_KEY)
logger.debug('remove_unmonitored/queue IN: %s', formattedQueueInfo(queue))
if not queue: return 0
if not queue: return 0
# Find items affected
monitoredDownloadIDs = []
for queueItem in queue['records']:
Expand Down
48 changes: 28 additions & 20 deletions src/utils/shared.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,14 +82,17 @@ def permittedAttemptsCheck(settings_dict, affectedItems, failType, BASE_URL, def
try:
defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] += 1
except KeyError:
add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1})
logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
add_keys_nested_dict(defective_tracker.dict,[BASE_URL, failType, affectedItem['downloadId']], {'title': affectedItem['title'], 'Attempts': 1})
attempts_left = settings_dict['PERMITTED_ATTEMPTS'] - defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']
# If not exceeding the number of permitted times, remove from being affected
if defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts'] <= settings_dict['PERMITTED_ATTEMPTS']:
if attempts_left >= 0: # Still got attempts left
logger.info('>>> Detected %s download (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
affectedItems.remove(affectedItem)
# else:
# # Will be deleted - reset the counter to 0
# del defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]
if attempts_left <= -1: # Too many attempts
logger.info('>>> Detected %s download too many times (%s out of %s permitted times): %s', failType, str(defective_tracker.dict[BASE_URL][failType][affectedItem['downloadId']]['Attempts']), str(settings_dict['PERMITTED_ATTEMPTS']), affectedItem['title'])
if attempts_left < -2: # Too many attempts and should already have been removed
# If supposedly deleted item keeps coming back, print out guidance for "Reject Blocklisted Torrent Hashes While Grabbing"
logger.verbose('>>> [Tip!] Since this download should already have been removed in a previous iteration but keeps coming back, this indicates the blocking of the torrent does not work correctly. Consider turning on the option "Reject Blocklisted Torrent Hashes While Grabbing" on the indexer in the *arr app: %s', affectedItem['title'])
logger.debug('permittedAttemptsCheck/defective_tracker.dict OUT: %s', str(defective_tracker.dict))
return affectedItems

Expand All @@ -114,17 +117,22 @@ def errorDetails(NAME, error):
return

def formattedQueueInfo(queue):
    """Condense an *arr queue payload into a compact structure for debug logs.

    Groups queue records that share the same downloadId (one download can map
    to several queue items, e.g. multiple episodes of a season pack) into a
    single entry listing all queue item IDs.

    Args:
        queue: Queue response dict with a 'records' list, where each record
            carries 'id', 'downloadId', and 'title'. May be falsy (empty/None).

    Returns:
        list[dict]: entries of the form
            {'downloadId': ..., 'downloadTitle': ..., 'IDs': [...]},
        or the string 'empty' when the queue is falsy,
        or the string 'error' when processing fails.
    """
    try:
        # Guard first: an empty/None queue has no 'records' to iterate.
        if not queue: return 'empty'
        formatted_list = []
        for record in queue['records']:
            download_id = record['downloadId']
            title = record['title']
            item_id = record['id']
            # Merge records belonging to the same download into one entry.
            existing_entry = next((item for item in formatted_list if item['downloadId'] == download_id), None)
            if existing_entry:
                existing_entry['IDs'].append(item_id)
            else:
                new_entry = {'downloadId': download_id, 'downloadTitle': title, 'IDs': [item_id]}
                formatted_list.append(new_entry)
        return formatted_list
    except Exception as error:
        # Logging helper defined elsewhere in this module; never let a
        # formatting failure crash the caller's debug logging.
        errorDetails('formattedQueueInfo', error)
        return 'error'

0 comments on commit 17370a4

Please sign in to comment.