diff --git a/.gitignore b/.gitignore index 5d9fdefd14..9af244f597 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Medusa User Related # ###################### /cache/ +/cache-*/ /Logs/ /data/ restore/ diff --git a/CHANGELOG.md b/CHANGELOG.md index 22461fb996..2b9c654955 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +## 0.5.29 (11-04-2022) + +#### New Features +- Support for Plex metadata (.plexmatch) ([10466](https://github.com/pymedusa/Medusa/pull/10466)) + +#### Improvements +- Make the cache db and cache files optional for inclusion in the backup ([10475](https://github.com/pymedusa/Medusa/pull/10475)) + +#### Fixes +- Fix joining segments in log for failed episodes ([10472](https://github.com/pymedusa/Medusa/pull/10472)) + ## 0.5.28 (01-04-2022) #### Improvements diff --git a/medusa/__main__.py b/medusa/__main__.py index e3ba7ad89e..2087550a62 100755 --- a/medusa/__main__.py +++ b/medusa/__main__.py @@ -325,7 +325,7 @@ def start(self, args): # Check if we need to perform a restore first restore_dir = os.path.join(app.DATA_DIR, 'restore') - if os.path.exists(restore_dir): + if os.path.exists(restore_dir) and os.listdir(restore_dir): success = self.restore_db(restore_dir, app.DATA_DIR) if self.console_logging: sys.stdout.write('Restore: restoring DB and config.ini %s!\n' % ('FAILED', 'SUCCESSFUL')[success]) @@ -1009,6 +1009,7 @@ def initialize(self, console_logging=True): app.METADATA_WDTV = check_setting_list(app.CFG, 'General', 'metadata_wdtv', ['0'] * 11, transform=int) app.METADATA_TIVO = check_setting_list(app.CFG, 'General', 'metadata_tivo', ['0'] * 11, transform=int) app.METADATA_MEDE8ER = check_setting_list(app.CFG, 'General', 'metadata_mede8er', ['0'] * 11, transform=int) + app.METADATA_PLEX = check_setting_list(app.CFG, 'General', 'metadata_plex', ['0'] * 11, transform=int) app.HOME_LAYOUT = check_setting_str(app.CFG, 'GUI', 'home_layout', 'poster') app.HISTORY_LAYOUT = check_setting_str(app.CFG, 'GUI', 
'history_layout', 'detailed') @@ -1052,6 +1053,9 @@ def initialize(self, console_logging=True): app.CACHE_RECOMMENDED_TRAKT_LISTS = check_setting_list(app.CFG, 'Recommended', 'trakt_lists', app.CACHE_RECOMMENDED_TRAKT_LISTS) app.CACHE_RECOMMENDED_PURGE_AFTER_DAYS = check_setting_int(app.CFG, 'Recommended', 'purge_after_days', 180) + app.BACKUP_CACHE_DB = check_setting_int(app.CFG, 'Backup', 'cache_db', 1) + app.BACKUP_CACHE_FILES = check_setting_int(app.CFG, 'Backup', 'cache_files', 1) + # Initialize trakt config path. trakt.core.CONFIG_PATH = os.path.join(app.CACHE_DIR, '.pytrakt.json') trakt.core.load_config() @@ -1236,7 +1240,8 @@ def initialize(self, console_logging=True): (app.METADATA_PS3, metadata.ps3), (app.METADATA_WDTV, metadata.wdtv), (app.METADATA_TIVO, metadata.tivo), - (app.METADATA_MEDE8ER, metadata.mede8er)]: + (app.METADATA_MEDE8ER, metadata.mede8er), + (app.METADATA_PLEX, metadata.plex)]: (cur_metadata_config, cur_metadata_class) = cur_metadata_tuple tmp_provider = cur_metadata_class.metadata_class() tmp_provider.set_config(cur_metadata_config) @@ -1687,6 +1692,7 @@ def save_config(): new_config['General']['metadata_wdtv'] = app.METADATA_WDTV new_config['General']['metadata_tivo'] = app.METADATA_TIVO new_config['General']['metadata_mede8er'] = app.METADATA_MEDE8ER + new_config['General']['metadata_plex'] = app.METADATA_PLEX new_config['General']['backlog_days'] = int(app.BACKLOG_DAYS) @@ -1757,6 +1763,10 @@ def save_config(): new_config['Blackhole']['nzb_dir'] = app.NZB_DIR new_config['Blackhole']['torrent_dir'] = app.TORRENT_DIR + new_config['Backup'] = {} + new_config['Backup']['cache_db'] = int(app.BACKUP_CACHE_DB) + new_config['Backup']['cache_files'] = int(app.BACKUP_CACHE_FILES) + # dynamically save provider settings all_providers = providers.sorted_provider_list() for provider in all_providers: diff --git a/medusa/app.py b/medusa/app.py index 133a7ff8cf..82ba545c60 100644 --- a/medusa/app.py +++ b/medusa/app.py @@ -36,9 +36,14 @@ def 
__init__(self): self.EXT3_FOLDER = 'ext3' self.STATIC_FOLDER = 'static' self.UNKNOWN_RELEASE_GROUP = 'Medusa' + + # Backup related self.BACKUP_DIR = 'backup' self.BACKUP_FILENAME_PREFIX = 'backup' self.BACKUP_FILENAME = self.BACKUP_FILENAME_PREFIX + '-{timestamp}.zip' + self.BACKUP_CACHE_DB = None + self.BACKUP_CACHE_FILES = None + self.LEGACY_DB = 'sickbeard.db' self.APPLICATION_DB = 'main.db' self.FAILED_DB = 'failed.db' @@ -229,6 +234,7 @@ def __init__(self): self.METADATA_WDTV = [] self.METADATA_TIVO = [] self.METADATA_MEDE8ER = [] + self.METADATA_PLEX = [] self.QUALITY_DEFAULT = None self.STATUS_DEFAULT = None diff --git a/medusa/common.py b/medusa/common.py index 6579bf75b0..1cff15164d 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -39,7 +39,7 @@ log.logger.addHandler(logging.NullHandler()) INSTANCE_ID = text_type(uuid.uuid1()) -VERSION = '0.5.28' +VERSION = '0.5.29' USER_AGENT = 'Medusa/{version} ({system}; {release}; {instance})'.format( version=VERSION, system=platform.system(), release=platform.release(), diff --git a/medusa/failed_processor.py b/medusa/failed_processor.py index ce4f2ecae4..e44e9a0f4b 100644 --- a/medusa/failed_processor.py +++ b/medusa/failed_processor.py @@ -63,7 +63,7 @@ def _process_release_name(self): if segment: self.log(logger.DEBUG, 'Created segment of episodes [{segment}] from release: {release}'.format( - segment=','.join(ep.episode for ep in segment), + segment=','.join(str(ep.episode) for ep in segment), release=release_name )) diff --git a/medusa/metadata/__init__.py b/medusa/metadata/__init__.py index d379fc8e60..724f69aed7 100644 --- a/medusa/metadata/__init__.py +++ b/medusa/metadata/__init__.py @@ -27,6 +27,7 @@ kodi_12plus, mede8er, media_browser, + plex, ps3, tivo, wdtv, @@ -38,6 +39,7 @@ 'kodi_12plus', 'mede8er', 'media_browser', + 'plex', 'ps3', 'tivo', 'wdtv', diff --git a/medusa/metadata/generic.py b/medusa/metadata/generic.py index 27fb1dd28a..08991ee594 100644 --- a/medusa/metadata/generic.py +++ 
b/medusa/metadata/generic.py @@ -297,12 +297,12 @@ def update_show_indexer_metadata(self, show_obj): nfo_file_path = self.get_show_file_path(show_obj) try: - with io.open(nfo_file_path, 'rb') as xmlFileObj: - showXML = etree.ElementTree(file=xmlFileObj) + with io.open(nfo_file_path, 'rb') as xml_file_obj: + show_xml = etree.ElementTree(file=xml_file_obj) - indexerid = showXML.find('id') + indexerid = show_xml.find('id') - root = showXML.getroot() + root = show_xml.getroot() if indexerid is not None: indexerid.text = str(show_obj.indexerid) else: @@ -311,7 +311,7 @@ def update_show_indexer_metadata(self, show_obj): # Make it purdy helpers.indent_xml(root) - showXML.write(nfo_file_path, encoding='UTF-8') + show_xml.write(nfo_file_path, encoding='UTF-8') helpers.chmod_as_parent(nfo_file_path) return True @@ -967,31 +967,35 @@ def retrieveShowMetadata(self, folder): {'name': self.name, 'location': folder}) try: - with io.open(metadata_path, 'rb') as xmlFileObj: - showXML = etree.ElementTree(file=xmlFileObj) + with io.open(metadata_path, 'rb') as xml_file_obj: + show_xml = etree.ElementTree(file=xml_file_obj) - uniqueid = showXML.find("uniqueid[@default='true']") + uniqueid = show_xml.find("uniqueid[@default='true']") if ( - showXML.findtext('title') is None or - (showXML.findtext('tvdbid') is None and showXML.findtext('id') is None and showXML.find("uniqueid[@default='true']") is None) + show_xml.findtext('title') is None + or ( + show_xml.findtext('tvdbid') is None + and show_xml.findtext('id') is None + and show_xml.find("uniqueid[@default='true']") is None + ) ): log.debug( 'Invalid info in tvshow.nfo (missing name or id): {0} {1} {2}', - showXML.findtext('title'), showXML.findtext('tvdbid'), showXML.findtext('id'), + show_xml.findtext('title'), show_xml.findtext('tvdbid'), show_xml.findtext('id'), ) return empty_return - name = showXML.findtext('title') + name = show_xml.findtext('title') if uniqueid is not None and uniqueid.get('type') and 
indexer_name_mapping.get(uniqueid.get('type')): indexer = indexer_name_mapping.get(uniqueid.get('type')) indexer_id = int(ImdbIdentifier(uniqueid.text).series_id) else: # For legacy nfo's - if showXML.findtext('tvdbid'): - indexer_id = int(showXML.findtext('tvdbid')) - elif showXML.findtext('id'): - indexer_id = int(showXML.findtext('id')) + if show_xml.findtext('tvdbid'): + indexer_id = int(show_xml.findtext('tvdbid')) + elif show_xml.findtext('id'): + indexer_id = int(show_xml.findtext('id')) else: log.warning('Empty or field in NFO, unable to find a ID') return empty_return @@ -1002,8 +1006,8 @@ def retrieveShowMetadata(self, folder): return empty_return indexer = None - if showXML.findtext('episodeguide/url'): - epg_url = showXML.findtext('episodeguide/url').lower() + if show_xml.findtext('episodeguide/url'): + epg_url = show_xml.findtext('episodeguide/url').lower() if str(indexer_id) in epg_url: if 'thetvdb.com' in epg_url: indexer = INDEXER_TVDBV2 diff --git a/medusa/metadata/plex.py b/medusa/metadata/plex.py new file mode 100644 index 0000000000..270742ad4f --- /dev/null +++ b/medusa/metadata/plex.py @@ -0,0 +1,258 @@ +# coding=utf-8 +"""Plex metadata module.""" + +from __future__ import unicode_literals + +import io +import logging +import os + +from medusa import helpers +from medusa.indexers.imdb.api import ImdbIdentifier +from medusa.logger.adapters.style import BraceAdapter +from medusa.metadata import generic + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + +class PlexMetadata(generic.GenericMetadata): + """ + Metadata generation class for Plex (.plexmatch). 
+ + The following file structure is used: + show_root/.plexmatch (series level match file) + """ + + def __init__(self, + show_metadata=False, + episode_metadata=False, + fanart=False, + poster=False, + banner=False, + episode_thumbnails=False, + season_posters=False, + season_banners=False, + season_all_poster=False, + season_all_banner=False): + """Plex Metadata constructor.""" + generic.GenericMetadata.__init__(self, + show_metadata, + episode_metadata, + fanart, + poster, + banner, + episode_thumbnails, + season_posters, + season_banners, + season_all_poster, + season_all_banner) + + self.name = 'Plex' + self._show_metadata_filename = '.plexmatch' + + # web-ui metadata template + self.eg_show_metadata = '.plexmatch' + self.eg_episode_metadata = '.plexmatch' + # self.eg_fanart = 'not supported' + # self.eg_poster = 'cover.jpg' + # self.eg_banner = 'not supported' + # self.eg_episode_thumbnails = 'Season##\\filename.ext.cover.jpg' + # self.eg_season_posters = 'not supported' + # self.eg_season_banners = 'not supported' + # self.eg_season_all_poster = 'not supported' + # self.eg_season_all_banner = 'not supported' + + def _show_data(self, show_obj): + """ + Create a .plexmatch file. + + returns the resulting data object. 
+ show_obj: a Series instance to create the .plexmatch for + """ + file_content = f'Title: {show_obj.title}\n' + file_content += f'Year: {show_obj.start_year}\n' + + # Add main indexer + externals = {} + if show_obj.identifier.indexer.slug in ('tvdb', 'tmdb', 'imdb'): + show_id = show_obj.identifier.id + if (show_obj.identifier.indexer.slug == 'imdb'): + show_id = ImdbIdentifier(show_id).imdb_id + + externals[f'{show_obj.identifier.indexer.slug}id'] = str(show_id) + + for indexer_slug in ('tvdb', 'tmdb', 'imdb'): + if indexer_slug == show_obj.identifier.indexer.slug: + continue + + external_id = show_obj.externals.get(f'{indexer_slug}_id') + if not external_id: + continue + + if (indexer_slug == 'imdb'): + external_id = ImdbIdentifier(show_id).imdb_id + + externals[f'{indexer_slug}id'] = str(external_id) + + for external, external_id in externals.items(): + file_content += f'{external}: {external_id}\n' + + return file_content + + def write_show_file(self, show_obj): + """ + Generate and write show_obj's metadata under the given path to the filename given by get_show_file_path(). + + show_obj: Series object for which to create the metadata + + Note that this method expects that _show_data will return a string, + which will be written to a text file. 
+ """ + data = self._show_data(show_obj) + + if not data: + return False + + flexmatch_file_path = self.get_show_file_path(show_obj) + flexmatch_file_dir = os.path.dirname(flexmatch_file_path) + + try: + if not os.path.isdir(flexmatch_file_dir): + log.debug( + 'Metadata directory did not exist, creating it at {location}', + {'location': flexmatch_file_dir} + ) + os.makedirs(flexmatch_file_dir) + helpers.chmod_as_parent(flexmatch_file_dir) + + log.debug( + 'Writing show flexmatch file to {location}', + {'location': flexmatch_file_dir} + ) + + flexmatch_file = io.open(flexmatch_file_path, 'wb') + flexmatch_file.write(data.encode('utf-8')) + flexmatch_file.close() + helpers.chmod_as_parent(flexmatch_file_path) + except IOError as error: + log.error( + 'Unable to write file to {location} - are you sure the folder is writable? {error}', + {'location': flexmatch_file_path, 'error': error} + ) + return False + + return True + + def _ep_data(self, current_content, ep_obj): + """ + Create an array with show plus episode info. + + All existing lines are imported. And the line with episode info for this specific + episode is replaced. + + show_obj: a Episode instance to create the new episode / special line for. + """ + new_data = [] + episodes = [] + + for line in current_content: + line = line.strip() # Remove the \n + if line.lower().startswith('ep:') or line.lower().startswith('sp:'): + # If the episode is the same as the one we want to add, don't add it. + # We're going to re-add this later. + if line.lower().startswith(f'ep: {ep_obj.slug}') or line.lower().startswith(f'sp: {ep_obj.slug}'): + continue + + episodes.append(line) + else: + new_data.append(line) + + # Add the location for the new episode. 
+ if ep_obj.series.location in ep_obj.location and ep_obj.location.replace(ep_obj.series.location, ''): + location = ep_obj.location.replace(ep_obj.series.location, '') + if location: + if ep_obj.season == 0: + episodes.append(f'sp: {ep_obj.episode:02d}: {location}') + else: + episodes.append(f'ep: {ep_obj.slug}: {location}') + + return new_data + episodes + + def write_ep_file(self, ep_obj): + """ + Add episode information to the .plexmatch file. + + The episode hint:value pairs are used to match an episode filename to a specific episode. + + Uses the format of: + ep: S01E12: /Season 01/Episode 12 - Finale Part 2.mkv + + :param ep_obj: Episode object for which to create the metadata + """ + # Parse existing .flexmatch data + flexmatch_file_path = self.get_show_file_path(ep_obj.series) + flexmatch_file_dir = os.path.dirname(flexmatch_file_path) + + with open(flexmatch_file_path) as f: + current_content = f.readlines() + + data = self._ep_data(current_content, ep_obj) + + if not data: + return False + + if not (flexmatch_file_path and flexmatch_file_dir): + log.debug('Unable to write episode flexmatch file because episode location is missing.') + return False + + try: + if not os.path.isdir(flexmatch_file_dir): + log.debug('Metadata directory missing, creating it at {location}', + {'location': flexmatch_file_dir}) + os.makedirs(flexmatch_file_dir) + helpers.chmod_as_parent(flexmatch_file_dir) + + log.debug('Writing episode flexmatch file to {location}', + {'location': flexmatch_file_path}) + + with open(flexmatch_file_path, 'w') as outfile: + outfile.write('\n'.join(data)) + + helpers.chmod_as_parent(flexmatch_file_path) + except IOError: + log.error('Unable to write file to {location}', {'location': flexmatch_file_path}) + return False + + return True + + def create_show_metadata(self, show_obj): + """Create show metadata.""" + if self.show_metadata and show_obj and (not self._has_show_metadata(show_obj) or self.overwrite_nfo): + log.debug( + 'Metadata provider 
{name} creating series metadata for {series}', + {'name': self.name, 'series': show_obj.name} + ) + return self.write_show_file(show_obj) + return False + + def create_episode_metadata(self, ep_obj): + """Create episode metadata.""" + if self.episode_metadata and ep_obj: + if not self._has_show_metadata(ep_obj.series): + self.write_show_file(ep_obj.series) + log.debug( + 'Metadata provider {name} creating episode metadata for {episode}', + {'name': self.name, 'episode': ep_obj.pretty_name()} + ) + return self.write_ep_file(ep_obj) + return False + + # Override with empty methods for unsupported features + def retrieveShowMetadata(self, folder): + """Disable retrieve show by metadata.""" + return None, None, None + + +# present a standard "interface" from the module +metadata_class = PlexMetadata diff --git a/medusa/providers/torrent/html/morethantv.py b/medusa/providers/torrent/html/morethantv.py index 8b3d053538..ef066d5a5c 100644 --- a/medusa/providers/torrent/html/morethantv.py +++ b/medusa/providers/torrent/html/morethantv.py @@ -141,9 +141,13 @@ def process_column_header(td): if row.find('img', alt='Nuked'): continue + title = cells[labels.index('Name')].find('a', class_='overlay_torrent').get_text(strip=True) + download_url = urljoin(self.url, cells[labels.index('Name')].find('a')['href']) + if not all([title, download_url]): + continue + seeders = int(cells[labels.index('Seeders')].get_text(strip=True).replace(',', '')) leechers = int(cells[labels.index('Leechers')].get_text(strip=True).replace(',', '')) - title = cells[labels.index('Name')].find('a').get_text(strip=True) # Filter unseeded torrent if seeders < self.minseed: @@ -158,29 +162,22 @@ def process_column_header(td): torrent_size = cells[labels.index('Size')].get_text(strip=True) size = convert_size(torrent_size, units=units) or -1 - pubdate_raw = cells[4].find('span')['title'] + pubdate_raw = cells[3].find('span')['title'] pubdate = self.parse_pubdate(pubdate_raw) - releases = 
cells[labels.index('Name')].find('table').find_all('tr') - for release in releases: - release_title = release.find('td').get_text(strip=True) - download_url = urljoin(self.url, release.find('a')['href']) - if not all([release_title, download_url]): - continue - - item = { - 'title': release_title, - 'link': download_url, - 'size': size, - 'seeders': seeders, - 'leechers': leechers, - 'pubdate': pubdate, - } - if mode != 'RSS': - log.debug('Found result: {0} with {1} seeders and {2} leechers', - title, seeders, leechers) - - items.append(item) + item = { + 'title': title, + 'link': download_url, + 'size': size, + 'seeders': seeders, + 'leechers': leechers, + 'pubdate': pubdate, + } + if mode != 'RSS': + log.debug('Found result: {0} with {1} seeders and {2} leechers', + title, seeders, leechers) + + items.append(item) except (AttributeError, TypeError, KeyError, ValueError, IndexError): log.exception('Failed parsing provider.') diff --git a/medusa/server/api/v2/base.py b/medusa/server/api/v2/base.py index d1471b6289..fda17efd84 100644 --- a/medusa/server/api/v2/base.py +++ b/medusa/server/api/v2/base.py @@ -608,6 +608,7 @@ def patch(self, target, value): 'sony_ps3': ListField(app, 'METADATA_PS3'), 'tivo': ListField(app, 'METADATA_TIVO'), 'wdtv': ListField(app, 'METADATA_WDTV'), + 'plex': ListField(app, 'METADATA_PLEX'), } map_values = OrderedDict([ diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 2f436bde1e..7c43648026 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -142,6 +142,9 @@ class ConfigHandler(BaseRequestHandler): 'webInterface.httpsKey': StringField(app, 'HTTPS_KEY'), 'webInterface.handleReverseProxy': BooleanField(app, 'HANDLE_REVERSE_PROXY'), + 'backup.cacheDb': BooleanField(app, 'BACKUP_CACHE_DB'), + 'backup.cacheFiles': BooleanField(app, 'BACKUP_CACHE_FILES'), + 'webRoot': StringField(app, 'WEB_ROOT'), 'cpuPreset': StringField(app, 'CPU_PRESET'), 'sslVerify': BooleanField(app, 
'SSL_VERIFY'), @@ -740,6 +743,10 @@ def data_main(): section_data['webInterface']['httpsKey'] = app.HTTPS_KEY section_data['webInterface']['handleReverseProxy'] = bool(app.HANDLE_REVERSE_PROXY) + section_data['backup'] = {} + section_data['backup']['cacheDb'] = bool(app.BACKUP_CACHE_DB) + section_data['backup']['cacheFiles'] = bool(app.BACKUP_CACHE_FILES) + section_data['webRoot'] = app.WEB_ROOT section_data['cpuPreset'] = app.CPU_PRESET section_data['sslVerify'] = bool(app.SSL_VERIFY) diff --git a/medusa/server/api/v2/system.py b/medusa/server/api/v2/system.py index e66ca0e490..4c27d3505e 100644 --- a/medusa/server/api/v2/system.py +++ b/medusa/server/api/v2/system.py @@ -2,7 +2,13 @@ """Request handler for statistics.""" from __future__ import unicode_literals +import logging +import os +import time + from medusa import app, ui +from medusa import helpers +from medusa.logger.adapters.style import BraceAdapter from medusa.server.api.v2.base import BaseRequestHandler from medusa.system.restart import Restart from medusa.system.shutdown import Shutdown @@ -10,6 +16,9 @@ from tornado.escape import json_decode +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + class SystemHandler(BaseRequestHandler): """System operation calls request handler.""" @@ -50,6 +59,7 @@ def post(self, identifier, *args, **kwargs): return self._created() else: return self._bad_request('Update failed') + else: return self._bad_request('Backup failed') else: ui.notifications.message('Already on branch: ', data['branch']) @@ -86,6 +96,12 @@ def post(self, identifier, *args, **kwargs): else: return self._bad_request('Failed starting download handler') + if data['type'] == 'BACKUPTOZIP': + return self._backup_to_zip(data.get('backupDir')) + + if data['type'] == 'RESTOREFROMZIP': + return self._restore_from_zip(data.get('backupFile')) + return self._bad_request('Invalid operation') def _backup(self, branch=None): @@ -126,3 +142,63 @@ def _update(self, 
branch=None): ), 'Check logs for more information.') return False + + def _backup_to_zip(self, backup_dir): + """Create a backup and save to zip.""" + final_result = '' + + if backup_dir: + source = [ + os.path.join(app.DATA_DIR, app.APPLICATION_DB), app.CONFIG_FILE + ] + + if app.BACKUP_CACHE_DB: + source += [ + os.path.join(app.DATA_DIR, app.FAILED_DB), + os.path.join(app.DATA_DIR, app.CACHE_DB), + os.path.join(app.DATA_DIR, app.RECOMMENDED_DB) + ] + target = os.path.join(backup_dir, 'medusa-{date}.zip'.format(date=time.strftime('%Y%m%d%H%M%S'))) + log.info(u'Starting backup to location: {location} ', {'location': target}) + + if app.BACKUP_CACHE_FILES: + for (path, dirs, files) in os.walk(app.CACHE_DIR, topdown=True): + for dirname in dirs: + if path == app.CACHE_DIR and dirname not in ['images']: + dirs.remove(dirname) + for filename in files: + source.append(os.path.join(path, filename)) + + if helpers.backup_config_zip(source, target, app.DATA_DIR): + final_result += 'Successful backup to {location}'.format(location=target) + else: + final_result += 'Backup FAILED' + else: + final_result += 'You need to choose a folder to save your backup to!' + + final_result += '
\n' + + log.info(u'Finished backup to location: {location} ', {'location': target}) + return self._ok(data={'result': final_result}) + + def _restore_from_zip(self, backup_file): + """Restore from zipped backup.""" + final_result = '' + + if backup_file: + source = backup_file + target_dir = os.path.join(app.DATA_DIR, 'restore') + log.info(u'Restoring backup from location: {location} ', {'location': backup_file}) + + if helpers.restore_config_zip(source, target_dir): + final_result += 'Successfully extracted restore files to {location}'.format(location=target_dir) + final_result += '
Restart Medusa to complete the restore.' + else: + final_result += 'Restore FAILED' + else: + final_result += 'You need to select a backup file to restore!' + + final_result += '
\n' + + log.info(u'Finished restore from location: {location}', {'location': backup_file}) + return self._ok(data={'result': final_result}) diff --git a/medusa/server/web/config/backup_restore.py b/medusa/server/web/config/backup_restore.py index ef93a26268..95766fe3d3 100644 --- a/medusa/server/web/config/backup_restore.py +++ b/medusa/server/web/config/backup_restore.py @@ -2,13 +2,6 @@ from __future__ import unicode_literals -import os -import time - -from medusa import ( - app, - helpers, -) from medusa.server.web.config.handler import Config from medusa.server.web.core import PageTemplate @@ -27,54 +20,3 @@ def index(self): [Converted to VueRouter] """ return PageTemplate(rh=self, filename='index.mako').render() - - @staticmethod - def backup(backupDir=None): - """Create backup.""" - final_result = '' - - if backupDir: - source = [os.path.join(app.DATA_DIR, app.APPLICATION_DB), app.CONFIG_FILE, - os.path.join(app.DATA_DIR, app.FAILED_DB), - os.path.join(app.DATA_DIR, app.CACHE_DB), - os.path.join(app.DATA_DIR, app.RECOMMENDED_DB)] - target = os.path.join(backupDir, 'medusa-{date}.zip'.format(date=time.strftime('%Y%m%d%H%M%S'))) - - for (path, dirs, files) in os.walk(app.CACHE_DIR, topdown=True): - for dirname in dirs: - if path == app.CACHE_DIR and dirname not in ['images']: - dirs.remove(dirname) - for filename in files: - source.append(os.path.join(path, filename)) - - if helpers.backup_config_zip(source, target, app.DATA_DIR): - final_result += 'Successful backup to {location}'.format(location=target) - else: - final_result += 'Backup FAILED' - else: - final_result += 'You need to choose a folder to save your backup to!' - - final_result += '
\n' - - return final_result - - @staticmethod - def restore(backupFile=None): - """Restore backup.""" - final_result = '' - - if backupFile: - source = backupFile - target_dir = os.path.join(app.DATA_DIR, 'restore') - - if helpers.restore_config_zip(source, target_dir): - final_result += 'Successfully extracted restore files to {location}'.format(location=target_dir) - final_result += '
Restart Medusa to complete the restore.' - else: - final_result += 'Restore FAILED' - else: - final_result += 'You need to select a backup file to restore!' - - final_result += '
\n' - - return final_result diff --git a/medusa/updater/version_checker.py b/medusa/updater/version_checker.py index d52ba25dcd..9a189b76e8 100644 --- a/medusa/updater/version_checker.py +++ b/medusa/updater/version_checker.py @@ -67,11 +67,11 @@ def _runbackup(self): log.info(u'Config backup in progress...') ui.notifications.message('Backup', 'Config backup in progress...') try: - backupDir = os.path.join(app.DATA_DIR, app.BACKUP_DIR) - if not os.path.isdir(backupDir): - os.mkdir(backupDir) + backup_dir = os.path.join(app.DATA_DIR, app.BACKUP_DIR) + if not os.path.isdir(backup_dir): + os.mkdir(backup_dir) - if self._keeplatestbackup(backupDir) and self._backup(backupDir): + if self._keeplatestbackup(backup_dir) and self._backup(backup_dir): log.info(u'Config backup successful') ui.notifications.message('Backup', 'Config backup successful') return True @@ -85,12 +85,12 @@ def _runbackup(self): return False @staticmethod - def _keeplatestbackup(backupDir=None): - if not backupDir: + def _keeplatestbackup(backup_dir=None): + if not backup_dir: return False import glob - files = glob.glob(os.path.join(backupDir, '*.zip')) + files = glob.glob(os.path.join(backup_dir, '*.zip')) if not files: return True @@ -109,29 +109,36 @@ def _keeplatestbackup(backupDir=None): # TODO: Merge with backup in helpers @staticmethod - def _backup(backupDir=None): - if not backupDir: + def _backup(backup_dir=None): + if not backup_dir: return False + source = [ os.path.join(app.DATA_DIR, app.APPLICATION_DB), - app.CONFIG_FILE, - os.path.join(app.DATA_DIR, app.FAILED_DB), - os.path.join(app.DATA_DIR, app.CACHE_DB), - os.path.join(app.DATA_DIR, app.RECOMMENDED_DB) + app.CONFIG_FILE ] - target = os.path.join(backupDir, app.BACKUP_FILENAME.format(timestamp=time.strftime('%Y%m%d%H%M%S'))) - for (path, dirs, files) in os.walk(app.CACHE_DIR, topdown=True): - for dirname in dirs: - if path == app.CACHE_DIR and dirname not in ['images']: - dirs.remove(dirname) - for filename in files: - 
source.append(os.path.join(path, filename)) + if app.BACKUP_CACHE_DB: + source += [ + os.path.join(app.DATA_DIR, app.FAILED_DB), + os.path.join(app.DATA_DIR, app.CACHE_DB), + os.path.join(app.DATA_DIR, app.RECOMMENDED_DB) + ] + + target = os.path.join(backup_dir, app.BACKUP_FILENAME.format(timestamp=time.strftime('%Y%m%d%H%M%S'))) + + if app.BACKUP_CACHE_FILES: + for (path, dirs, files) in os.walk(app.CACHE_DIR, topdown=True): + for dirname in dirs: + if path == app.CACHE_DIR and dirname not in ['images']: + dirs.remove(dirname) + for filename in files: + source.append(os.path.join(path, filename)) return helpers.backup_config_zip(source, target, app.DATA_DIR) def safe_to_update(self): - + """Verify if it's safe to update.""" def db_safe(self): message = { 'equal': { diff --git a/setup.cfg b/setup.cfg index 958417eb2f..1f3ea2dc6d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -91,6 +91,7 @@ flake8-ignore = medusa/metadata/ps3.py D100 D102 D205 D400 D401 N802 medusa/metadata/tivo.py D100 D102 D202 D205 D400 D401 N802 medusa/metadata/wdtv.py D100 D102 D202 D205 D400 D401 N802 N813 + medusa/metadata/plex.py D100 D102 D202 D205 D400 D401 N802 N813 medusa/name_cache.py D100 D200 D400 D401 N802 N806 medusa/naming.py D100 D101 D102 D103 D205 D400 D401 N806 medusa/network_timezones.py D100 D103 D200 D202 D400 diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index dfc7d82079..6403d2ee0a 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -176,6 +176,10 @@ def config_main(monkeypatch, app_config): section_data['providers']['prowlarr']['url'] = app.PROWLARR_URL section_data['providers']['prowlarr']['apikey'] = app.PROWLARR_APIKEY + section_data['backup'] = {} + section_data['backup']['cacheDb'] = bool(app.BACKUP_CACHE_DB) + section_data['backup']['cacheFiles'] = bool(app.BACKUP_CACHE_FILES) + return section_data diff --git a/themes-default/slim/src/app.js b/themes-default/slim/src/app.js index 1949353fa3..d050e9d734 100644 --- 
a/themes-default/slim/src/app.js +++ b/themes-default/slim/src/app.js @@ -10,6 +10,11 @@ import { App } from './components'; Vue.config.devtools = true; Vue.config.performance = true; +if (document.body.getAttribute('developer') === 'True') { + Vue.config.devtools = true; + Vue.config.performance = true; +} + registerPlugins(); // @TODO: Remove this before v1.0.0 diff --git a/themes-default/slim/src/components/app.vue b/themes-default/slim/src/components/app.vue index c2795ce6ce..ca23cb7abd 100644 --- a/themes-default/slim/src/components/app.vue +++ b/themes-default/slim/src/components/app.vue @@ -5,7 +5,7 @@
- +
@@ -30,7 +30,7 @@ import Alerts from './alerts.vue'; import AppHeader from './app-header.vue'; import SubMenu from './sub-menu.vue'; import AppFooter from './app-footer.vue'; -import { LoadProgressBar, ScrollButtons, SubmenuOffset } from './helpers'; +import { LoadProgressBar, ScrollButtons } from './helpers'; import { mapState } from 'vuex'; @@ -42,8 +42,7 @@ export default { AppHeader, LoadProgressBar, ScrollButtons, - SubMenu, - SubmenuOffset + SubMenu }, computed: { ...mapState({ @@ -79,7 +78,7 @@ export default { @media (max-width: 768px) { #app { - padding-top: 6rem; + padding-top: 3.8rem; } } diff --git a/themes-default/slim/src/components/backstretch.vue b/themes-default/slim/src/components/backstretch.vue index da04cac4de..090ddab61f 100644 --- a/themes-default/slim/src/components/backstretch.vue +++ b/themes-default/slim/src/components/backstretch.vue @@ -10,7 +10,8 @@ export default { }, data() { return { - created: false + created: false, + wrapper: null }; }, computed: { @@ -53,21 +54,24 @@ export default { $wrap.css('top', offset); $wrap.css('opacity', opacity).fadeIn(500); this.created = true; + this.wrapper = $wrap; + } + }, + removeBackStretch() { + if (this.created) { + $.backstretch('destroy'); + this.created = false; } } }, destroyed() { - if (this.created) { - $.backstretch('destroy'); - } + this.removeBackStretch(); }, activated() { this.setBackStretch(); }, deactivated() { - if (this.created) { - $.backstretch('destroy'); - } + this.removeBackStretch(); }, watch: { opacity(newOpacity) { diff --git a/themes-default/slim/src/components/config-backup-restore.vue b/themes-default/slim/src/components/config-backup-restore.vue index 89863ee285..242d56bb55 100644 --- a/themes-default/slim/src/components/config-backup-restore.vue +++ b/themes-default/slim/src/components/config-backup-restore.vue @@ -98,10 +98,10 @@ export default { backup.status = 'loading'; try { - const { data } = await this.client.apiRoute.get('config/backuprestore/backup', { - 
params: { backupDir: backup.dir }, timeout: 120000 - }); - backup.status = data; + const { data } = await this.client.api.post('system/operation', { + type: 'BACKUPTOZIP', backupDir: backup.dir + }, { timeout: 180000 }); + backup.status = data.result; backup.disabled = false; } catch (error) { this.$snotify.error( @@ -122,10 +122,11 @@ export default { restore.status = 'loading'; try { - const { data } = await this.client.apiRoute.get('config/backuprestore/restore', { - params: { backupFile: restore.file }, timeout: 120000 - }); - restore.status = data; + const { data } = await this.client.api.post('system/operation', { + type: 'RESTOREFROMZIP', backupFile: restore.file + }, { timeout: 180000 }); + + restore.status = data.result; restore.disabled = false; } catch (error) { this.$snotify.error( diff --git a/themes-default/slim/src/components/config-general.vue b/themes-default/slim/src/components/config-general.vue index 67cbccc501..c6b5a1f444 100644 --- a/themes-default/slim/src/components/config-general.vue +++ b/themes-default/slim/src/components/config-general.vue @@ -562,6 +562,28 @@
+ +
+
+

Backup

+
+
+
+ +

Include cache.db, failed.db, and recommended.db in the backup.

+

Note! These files are not mandatory for a proper restore, but could potentially cause timeouts when backing up or trying to update Medusa.

+
+ + +

Include everything from the cache folder in the backup.

+

Note! These files are not mandatory for a proper restore, but could potentially cause timeouts when backing up or trying to update Medusa.

+
+ + +
+
+
+
All non-absolute folder locations are relative to {{system.dataDir}}
diff --git a/themes-default/slim/src/components/display-show.vue b/themes-default/slim/src/components/display-show.vue index 384214bd94..0251a42035 100644 --- a/themes-default/slim/src/components/display-show.vue +++ b/themes-default/slim/src/components/display-show.vue @@ -377,6 +377,7 @@ - - diff --git a/themes-default/slim/src/components/schedule/calendar.vue b/themes-default/slim/src/components/schedule/calendar.vue index f40d89c219..e120eae7a0 100644 --- a/themes-default/slim/src/components/schedule/calendar.vue +++ b/themes-default/slim/src/components/schedule/calendar.vue @@ -29,20 +29,17 @@ - - - - - + - - - - diff --git a/themes/dark/assets/js/add-show-options.js b/themes/dark/assets/js/add-show-options.js deleted file mode 100644 index 08c210b1c7..0000000000 --- a/themes/dark/assets/js/add-show-options.js +++ /dev/null @@ -1 +0,0 @@ -$(document).ready((()=>{$(document.body).on("click","#saveDefaultsButton",(e=>{const t=[],s=[];$('select[name="allowed_qualities"] option:selected').each(((e,s)=>{t.push($(s).val())})),$('select[name="preferred_qualities"] option:selected').each(((e,t)=>{s.push($(t).val())}));const a={default_status:$("#statusSelect").val(),allowed_qualities:t.join(","),preferred_qualities:s.join(","),default_season_folders:$("#season_folders").prop("checked"),subtitles:$("#subtitles").prop("checked"),anime:$("#anime").prop("checked"),scene:$("#scene").prop("checked"),default_status_after:$("#statusSelectAfter").val()};$.get("config/general/saveAddShowDefaults",a),$(e.currentTarget).prop("disabled",!0),new PNotify({title:"Saved Defaults",text:'Your "add show" defaults have been set to your current selections.',shadow:!1})})),$(document.body).on("change",'#statusSelect, select[name="quality_preset"], #season_folders, select[name="allowed_qualities"], select[name="preferred_qualities"], #subtitles, #scene, #anime, #statusSelectAfter',(()=>{$("#saveDefaultsButton").prop("disabled",!1)}))})); \ No newline at end of file diff --git 
a/themes/dark/assets/js/add-shows/init.js b/themes/dark/assets/js/add-shows/init.js deleted file mode 100644 index 9cd271d8e9..0000000000 --- a/themes/dark/assets/js/add-shows/init.js +++ /dev/null @@ -1,196 +0,0 @@ -MEDUSA.addShows.init = function() { - $('#tabs').tabs({ - collapsible: true, - selected: (MEDUSA.config.layout.sortArticle ? -1 : 0) - }); - - const imgLazyLoad = new LazyLoad({ - // Example of options object -> see options section - threshold: 500 - }); - - $.initRemoteShowGrid = function() { - // Set defaults on page load - imgLazyLoad.update(); - imgLazyLoad.handleScroll(); - $('#showsort').val('original'); - $('#showsortdirection').val('asc'); - - $('#showsort').on('change', function() { - let sortCriteria; - switch (this.value) { - case 'original': - sortCriteria = 'original-order'; - break; - case 'rating': - /* Randomise, else the rating_votes can already - * have sorted leaving this with nothing to do. - */ - $('#container').isotope({ sortBy: 'random' }); - sortCriteria = 'rating'; - break; - case 'rating_votes': - sortCriteria = ['rating', 'votes']; - break; - case 'votes': - sortCriteria = 'votes'; - break; - default: - sortCriteria = 'name'; - break; - } - $('#container').isotope({ - sortBy: sortCriteria - }); - }); - - $(document.body).on('change', '#rootDirs', () => { - $.rootDirCheck(); - }); - - $('#showsortdirection').on('change', function() { - $('#container').isotope({ - sortAscending: (this.value === 'asc') - }); - }); - - $('#container').isotope({ - sortBy: 'original-order', - layoutMode: 'fitRows', - getSortData: { - name(itemElem) { - const name = $(itemElem).attr('data-name') || ''; - return (MEDUSA.config.layout.sortArticle ? 
name : name.replace(/^((?:the|a|an)\s)/i, '')).toLowerCase(); - }, - rating: '[data-rating] parseInt', - votes: '[data-votes] parseInt' - } - }).on('layoutComplete arrangeComplete removeComplete', () => { - imgLazyLoad.update(); - imgLazyLoad.handleScroll(); - }); - }; - - $.fn.loadRemoteShows = function(path, loadingTxt, errorTxt) { - $(this).html(' ' + loadingTxt); - $(this).load(path + ' #container', function(response, status) { - if (status === 'error') { - $(this).empty().html(errorTxt); - } else { - $.initRemoteShowGrid(); - imgLazyLoad.update(); - imgLazyLoad.handleScroll(); - } - }); - }; - - /* - * Blacklist a show by series id. - */ - $.initBlackListShowById = function() { - $(document.body).on('click', 'button[data-blacklist-show]', function(e) { - e.preventDefault(); - - if ($(this).is(':disabled')) { - return false; - } - - $(this).html('Blacklisted').prop('disabled', true); - $(this).parent().find('button[data-add-show]').prop('disabled', true); - - $.get('addShows/addShowToBlacklist?seriesid=' + $(this).attr('data-indexer-id')); - return false; - }); - }; - - /* - * Adds show by indexer and indexer_id with a number of optional parameters - * The show can be added as an anime show by providing the data attribute: data-isanime="1" - */ - $.initAddShowById = function() { - $(document.body).on('click', 'button[data-add-show]', function(e) { - e.preventDefault(); - - if ($(this).is(':disabled')) { - return false; - } - - $(this).html('Added').prop('disabled', true); - $(this).parent().find('button[data-blacklist-show]').prop('disabled', true); - - const anyQualArray = []; - const bestQualArray = []; - $('select[name="allowed_qualities"] option:selected').each((i, d) => { - anyQualArray.push($(d).val()); - }); - $('select[name="preferred_qualities"] option:selected').each((i, d) => { - bestQualArray.push($(d).val()); - }); - - const configureShowOptions = $('#configure_show_options').prop('checked'); - - $.get('addShows/addShowByID?showslug=' + 
$(this).attr('data-indexer') + $(this).attr('data-indexer-id'), { - root_dir: $('#rootDirs option:selected').val(), // eslint-disable-line camelcase - configure_show_options: configureShowOptions, // eslint-disable-line camelcase - show_name: $(this).attr('data-show-name'), // eslint-disable-line camelcase - quality_preset: $('select[name="quality_preset"]').val(), // eslint-disable-line camelcase - default_status: $('#statusSelect').val(), // eslint-disable-line camelcase - any_qualities: anyQualArray.join(','), // eslint-disable-line camelcase - best_qualities: bestQualArray.join(','), // eslint-disable-line camelcase - season_folders: $('#season_folders').prop('checked'), // eslint-disable-line camelcase - subtitles: $('#subtitles').prop('checked'), - anime: $('#anime').prop('checked'), - scene: $('#scene').prop('checked'), - default_status_after: $('#statusSelectAfter').val() // eslint-disable-line camelcase - }); - return false; - }); - }; - - $.updateBlackWhiteList = function(showName) { - $('#white').children().remove(); - $('#black').children().remove(); - $('#pool').children().remove(); - - if ($('#anime').prop('checked') && showName) { - $('#blackwhitelist').show(); - if (showName) { - $.getJSON('home/fetch_releasegroups', { - series_name: showName // eslint-disable-line camelcase - }, data => { - if (data.result === 'success') { - $.each(data.groups, (i, group) => { - const option = $('