From 8695768ddc77b28c451bcd7b6f2f0fbbc299bd46 Mon Sep 17 00:00:00 2001 From: miigotu Date: Wed, 17 Feb 2016 09:45:10 -0800 Subject: [PATCH 01/12] Fix searches with ABNormal Replace double quotes with single quotes as per our decided convention ABNormal, sort by Time for rss, or Seeders for everything else. --- sickbeard/providers/abnormal.py | 75 +++++++++++++++++---------------- 1 file changed, 38 insertions(+), 37 deletions(-) diff --git a/sickbeard/providers/abnormal.py b/sickbeard/providers/abnormal.py index 9fbd146ad8..d31bb8dcbe 100644 --- a/sickbeard/providers/abnormal.py +++ b/sickbeard/providers/abnormal.py @@ -36,7 +36,7 @@ class ABNormalProvider(TorrentProvider): # pylint: disable=too-many-instance-at def __init__(self): # Provider Init - TorrentProvider.__init__(self, "ABNormal") + TorrentProvider.__init__(self, 'ABNormal') # Credentials self.username = None @@ -48,14 +48,14 @@ def __init__(self): self.minleech = None # URLs - self.url = "https://abnormal.ws" + self.url = 'https://abnormal.ws' self.urls = { - "login": urljoin(self.url, "login.php"), - "search": urljoin(self.url, "torrents.php"), + 'login': urljoin(self.url, 'login.php'), + 'search': urljoin(self.url, 'torrents.php'), } # Proper Strings - self.proper_strings = ["PROPER"] + self.proper_strings = ['PROPER'] # Cache self.cache = tvcache.TVCache(self, min_time=30) @@ -65,17 +65,17 @@ def login(self): return True login_params = { - "username": self.username, - "password": self.password, + 'username': self.username, + 'password': self.password, } - response = self.get_url(self.urls["login"], post_data=login_params, timeout=30, returns="text") + response = self.get_url(self.urls['login'], post_data=login_params, timeout=30, returns='text') if not response: - logger.log("Unable to connect to provider", logger.WARNING) + logger.log('Unable to connect to provider', logger.WARNING) return False - if not re.search("torrents.php", response): - logger.log("Invalid username or password. Check your settings", logger.WARNING) + if not re.search('torrents.php', response): + logger.log('Invalid username or password. Check your settings', logger.WARNING) return False return True @@ -87,71 +87,72 @@ def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-man # Search Params search_params = { - "cat[]": ["TV|SD|VOSTFR", "TV|HD|VOSTFR", "TV|SD|VF", "TV|HD|VF", "TV|PACK|FR", "TV|PACK|VOSTFR", "TV|EMISSIONS", "ANIME"], - # Sorting: by time. 
Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size - "order": "Time", - # Both ASC and DESC are available - "way": "DESC" + 'cat[]': ['TV|SD|VOSTFR', 'TV|HD|VOSTFR', 'TV|SD|VF', 'TV|HD|VF', 'TV|PACK|FR', 'TV|PACK|VOSTFR', 'TV|EMISSIONS', 'ANIME'], + # Both ASC and DESC are available for sort direction + 'way': 'DESC' } # Units - units = ["O", "KO", "MO", "GO", "TO", "PO"] + units = ['O', 'KO', 'MO', 'GO', 'TO', 'PO'] for mode in search_strings: items = [] - logger.log("Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {}'.format(mode), logger.DEBUG) for search_string in search_strings[mode]: - if mode != "RSS": - logger.log("Search string: {}".format(search_string.decode("utf-8")), + if mode != 'RSS': + logger.log('Search string: {}'.format(search_string.decode('utf-8')), logger.DEBUG) - search_params["search"] = search_string - data = self.get_url(self.urls["search"], params=search_params, returns="text") + # Sorting: Available parameters: ReleaseName, Seeders, Leechers, Snatched, Size + search_params['order'] = ('Seeders', 'Time')[mode == 'RSS'] + search_params['search'] = re.sub(r'[()]', '', search_string) + data = self.get_url(self.urls['search'], params=search_params, returns='text') if not data: continue - with BS4Parser(data, "html5lib") as html: - torrent_table = html.find("table", class_=re.compile("torrent_table cats")) - torrent_rows = torrent_table.find_all("tr") if torrent_table else [] + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find(class_='torrent_table') + torrent_rows = torrent_table.find_all('tr') if torrent_table else [] # Continue only if at least one Release is found if len(torrent_rows) < 2: - logger.log("Data returned from provider does not contain any torrents", logger.DEBUG) + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) continue # Catégorie, Release, Date, DL, Size, C, S, L - labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all("td")] + labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all('td')] # Skip column headers for result in torrent_rows[1:]: - cells = result.find_all("td") + cells = result.find_all('td') if len(cells) < len(labels): continue try: - title = cells[labels.index("Release")].get_text(strip=True) - download_url = urljoin(self.url, cells[labels.index("DL")].find("a", class_="tooltip")["href"]) + title = cells[labels.index('Release')].get_text(strip=True) + download_url = urljoin(self.url, cells[labels.index('DL')].find('a', class_='tooltip')['href']) if not all([title, download_url]): continue - seeders = try_int(cells[labels.index("S")].get_text(strip=True)) - leechers = try_int(cells[labels.index("L")].get_text(strip=True)) + seeders = try_int(cells[labels.index('S')].get_text(strip=True)) + leechers = try_int(cells[labels.index('L')].get_text(strip=True)) # Filter unseeded torrent if seeders < self.minseed or leechers < self.minleech: - if mode != "RSS": - logger.log("Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format + if mode != 'RSS': + logger.log('Discarding torrent because it doesn\'t meet the minimum seeders or leechers: {} (S:{} L:{})'.format (title, seeders, leechers), logger.DEBUG) continue - torrent_size = cells[labels.index("Size")].get_text() + size_index = labels.index('Size') if 'Size' in labels else labels.index('Taille') + torrent_size = cells[size_index].get_text() size = convert_size(torrent_size, units=units) or -1 item = title, 
download_url, size, seeders, leechers - if mode != "RSS": - logger.log("Found result: {} with {} seeders and {} leechers".format + if mode != 'RSS': + logger.log('Found result: {} with {} seeders and {} leechers'.format (title, seeders, leechers), logger.DEBUG) items.append(item) From a445380e00b0bcc36d500c4542b2145cb424cf71 Mon Sep 17 00:00:00 2001 From: miigotu Date: Wed, 17 Feb 2016 19:12:20 -0800 Subject: [PATCH 02/12] NewPCT: * Unicode literals * Fix string quotes * Fix get_url override * Conformity and cleanup --- sickbeard/providers/newpct.py | 138 ++++++++++++++-------------------- 1 file changed, 56 insertions(+), 82 deletions(-) diff --git a/sickbeard/providers/newpct.py b/sickbeard/providers/newpct.py index 3b261dbe85..7bffdd4ccd 100644 --- a/sickbeard/providers/newpct.py +++ b/sickbeard/providers/newpct.py @@ -18,9 +18,9 @@ # You should have received a copy of the GNU General Public License # along with SickRage. If not, see . +from __future__ import unicode_literals +from requests.compat import urljoin import re -from six.moves import urllib -import traceback from sickbeard import helpers from sickbeard import logger, tvcache @@ -34,34 +34,32 @@ class newpctProvider(TorrentProvider): def __init__(self): - TorrentProvider.__init__(self, "Newpct") + TorrentProvider.__init__(self, 'Newpct') self.onlyspasearch = None - self.cache = tvcache.TVCache(self, min_time=10) - # Unsupported - # self.minseed = None - # self.minleech = None + self.url = 'http://www.newpct.com' + self.urls = {'search': urljoin(self.url, 'index.php')} - self.urls = { - 'base_url': 'http://www.newpct.com', - 'search': 'http://www.newpct.com/index.php' - } - - self.url = self.urls['base_url'] + self.cache = tvcache.TVCache(self, min_time=20) + def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals """ Search query: http://www.newpct.com/index.php?l=doSearch&q=fringe&category_=All&idioma_=1&bus_de_=All q => Show name - category_ = Category "Shows" (767) + category_ = Category 'Shows' (767) idioma_ = Language Spanish (1) bus_de_ = Date from (All, hoy) """ + results = [] - self.search_params = { + # Only search if user conditions are true + lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang + + search_params = { 'l': 'doSearch', 'q': '', 'category_': 'All', @@ -69,103 +67,79 @@ def __init__(self): 'bus_de_': 'All' } - def search(self, search_strings, age=0, ep_obj=None): # pylint: disable=too-many-locals - - results = [] - - # Only search if user conditions are true - lang_info = '' if not ep_obj or not ep_obj.show else ep_obj.show.lang - for mode in search_strings: items = [] - logger.log(u"Search Mode: {}".format(mode), logger.DEBUG) + logger.log('Search Mode: {}'.format(mode), logger.DEBUG) # Only search if user conditions are true if self.onlyspasearch and lang_info != 'es' and mode != 'RSS': - logger.log(u"Show info is not spanish, skipping provider search", logger.DEBUG) + logger.log('Show info is not spanish, skipping provider search', logger.DEBUG) continue + search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy' + for search_string in search_strings[mode]: if mode != 'RSS': - logger.log(u"Search string: {}".format(search_string.decode("utf-8")), + logger.log('Search string: {}'.format(search_string.decode('utf-8')), logger.DEBUG) - self.search_params['q'] = search_string.strip() if mode != 'RSS' else '' - self.search_params['bus_de_'] = 'All' if mode != 'RSS' else 'hoy' + search_params['q'] = search_string - search_url = self.urls['search'] + '?' 
+ urllib.parse.urlencode(self.search_params) - logger.log(u"Search URL: %s" % search_url, logger.DEBUG) - - data = self.get_url(search_url, timeout=30) + data = self.get_url(self.urls['search'], params=search_params, timeout=30, returns='text') if not data: continue - try: - with BS4Parser(data, 'html5lib') as html: - torrent_tbody = html.find('tbody') - - if torrent_tbody is None: - logger.log(u"Data returned from provider does not contain any torrents", logger.DEBUG) - continue - - torrent_table = torrent_tbody.findAll('tr') - if not torrent_table: - logger.log(u"Torrent table does not have any rows", logger.DEBUG) - continue - - for row in torrent_table[:-1]: - try: - torrent_row = row.findAll('a')[0] - - download_url = torrent_row.get('href', '') - - title = self._processTitle(torrent_row.get('title', '')) - - # Provider does not provide seeders/leechers - seeders = 1 - leechers = 0 - torrent_size = row.findAll('td')[2].text - - size = convert_size(torrent_size) or -1 - except (AttributeError, TypeError): - continue - + with BS4Parser(data, 'html5lib') as html: + torrent_table = html.find('table', id='categoryTable') + torrent_rows = torrent_table.find_all('tr') if torrent_table else [] + + # Continue only if at least one Release is found + if len(torrent_rows) < 3: # Headers + 1 Torrent + Pagination + logger.log('Data returned from provider does not contain any torrents', logger.DEBUG) + continue + + # 'Fecha', 'Título', 'Tamaño', '' + # Date, Title, Size + labels = [label.get_text(strip=True) for label in torrent_rows[0].find_all('th')] + for row in torrent_rows[1:-1]: + try: + cells = row.find_all('td') + + torrent_row = row.find('a') + title = self._processTitle(torrent_row.get('title', '')) + download_url = torrent_row.get('href', '') if not all([title, download_url]): continue - # Filter unseeded torrent (Unsupported) - # if seeders < self.minseed or leechers < self.minleech: - # if mode != 'RSS': - # logger.log(u"Discarding torrent because it doesn't meet the minimum seeders or leechers: {} (S:{} L:{})".format(title, seeders, leechers), logger.DEBUG) - # continue + # Provider does not provide seeders/leechers + seeders = 1 + leechers = 0 + torrent_size = cells[labels.index('Tamaño')].get_text(strip=True) + size = convert_size(torrent_size) or -1 item = title, download_url, size, seeders, leechers if mode != 'RSS': - logger.log(u"Found result: %s " % title, logger.DEBUG) + logger.log('Found result: {}'.format(title), logger.DEBUG) items.append(item) - - except Exception: - logger.log(u"Failed parsing provider. Traceback: %s" % traceback.format_exc(), logger.WARNING) - - # For each search mode sort all the items by seeders if available (Unsupported) - # items.sort(key=lambda tup: tup[3], reverse=True) + except (AttributeError, TypeError): + continue results += items return results - def get_url(self, url, post_data=None, params=None, timeout=30, json=False, need_bytes=False): # pylint: disable=too-many-arguments + def get_url(self, url, post_data=None, params=None, timeout=30, json=False, need_bytes=False, **kwargs): # pylint: disable=too-many-arguments """ need_bytes=True when trying access to torrent info (For calling torrent client). 
Previously we must parse the URL to get torrent file """ if need_bytes: - data = helpers.getURL(url, headers=self.headers, timeout=timeout, session=self.session, returns='json') + data = super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, json=json, kwargs=kwargs) url = re.search(r'http://tumejorserie.com/descargar/.+\.torrent', data, re.DOTALL).group() - return helpers.getURL(url, post_data=post_data, params=params, headers=self.headers, timeout=timeout, - session=self.session, json=json, need_bytes=need_bytes) + return super(newpctProvider, self).get_url(url, post_data=post_data, params=params, timeout=timeout, + json=json, need_bytes=need_bytes, kwargs=kwargs) def download_result(self, result): """ @@ -186,24 +160,24 @@ def download_result(self, result): if url_torrent.startswith('http'): self.headers.update({'Referer': '/'.join(url_torrent.split('/')[:3]) + '/'}) - logger.log(u"Downloading a result from " + self.name + " at " + url) + logger.log('Downloading a result from {}'.format(url)) if helpers.download_file(url_torrent, filename, session=self.session, headers=self.headers): if self._verify_download(filename): - logger.log(u"Saved result to " + filename, logger.INFO) + logger.log('Saved result to {}'.format(filename), logger.INFO) return True else: - logger.log(u"Could not download %s" % url, logger.WARNING) + logger.log('Could not download {}'.format(url), logger.WARNING) helpers.remove_file_failed(filename) if len(urls): - logger.log(u"Failed to download any results", logger.WARNING) + logger.log('Failed to download any results', logger.WARNING) return False @staticmethod def _processTitle(title): - # Remove "Mas informacion sobre " literal from title + # Remove 'Mas informacion sobre ' literal from title title = title[22:] # Quality - Use re module to avoid case sensitive problems with replace From e143bb94e4365474baa7304e3a33f26d45d2acd4 Mon Sep 17 00:00:00 2001 From: Indigo744 Date: Thu, 18 Feb 2016 21:51:20 +0100 Subject: [PATCH 03/12] Add OCS network logo MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Added OCS (Orange Ciné Series) logo, a french network. 
--- gui/slick/images/network/ocs.png | Bin 0 -> 3501 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 gui/slick/images/network/ocs.png diff --git a/gui/slick/images/network/ocs.png b/gui/slick/images/network/ocs.png new file mode 100644 index 0000000000000000000000000000000000000000..afae58e058972b2fb5580b370d19dfa5476cb218 GIT binary patch literal 3501 zcmV;e4N~%nP)004R>004l5008;`004mK004C`008P>0026e000+ooVrmw00006 zVoOIv00000008+zyMF)x010qNS#tmYE+PN`E+PS{;@y$}01XjIL_t(&-tAa)!feCQBR{))|4vFd+L0>$FNHp!aTmcAAhmnIXbZE4pCo~N)jLq zAddkC0ihsZ11td64OjNe+abqKo#V^De@%OMZhfs`uU_~lHorTWN>Ao0HUJHP;Q@pI z%pf=funNeOPUSrE?honSU%zH_?0%tL1BCW{Hj3Z+%>w0AQLZomcr>t3+7BJ+0+tPF z0&aG%LY_Q#s(1LEn=Yn&XyyEgyyUg%YK6Zm`2f~+fJFP)V8F3}MuzM41ntSMJsmgp zskd7>s;(-@`SZi?D4V~yN6`UzHLwuv`4q%15V9eg0B7Kk0`BmveuVGICdhX5JjDgf4uD4uk| z)i&IlezsDX_VL}Co@fgIaBI#c%Y{!b)G8`6DcJ)i3M)(mxHh=a1lNavKnQ3w0oREk zNkEbT?ZSnQ{Py#2I@P5qu{u~K`>&#RU!141B*Yt0B?*9K0s#{o2!-(*Oh7te%!ti7 z&I6^k&L@}sVWzArH2}aYJ=Rlf{^w-H2e76`4QN?#HJ{_8bv@gfpN$0o6#$&I|Cl5W6bz#TJ)Dcp*0(~CAh>xWHdD^90x3aM^z|@Nxc4SS zT3|iK@(CK0L%WB_)eHX?+MN8 zJxJe|Nhc*M2a8{stsdPu99~b@rkVz7GC|uueSqt$KUdw_^C`{?w>&HqMDjhNkmJCW z0`7y;f31vpWWJu5ZUcZ@RixP)KYEfEPoJaMHlV6uV4*^2Hr%btA95>Byrp#RTO&^$ z>QDRD-6?#C7&gK-7=oLWUd>Pbb%OWWpRWa00RWns>b?BlOS9;MKR+dO8)z;O42Xb) zdvI$$fNga8o8FL!eTXW?f7GR+`(F&XxOje4^N9#N7Za04Mk6yBE%N zQsur4BdDw>hrB8{Cg-`yfjfG_GVg!Cy$Wsp|| zoiFKS7p%Aqz)~FEd6THC_i}Z_7j zkO~IDGT|n5sMB=CqjJ%nks@e=6Ft$ib}YDw@gaD=U}*Vs5DytZ5KtT@seem?<|bdfa?@%s$!sC;QyO-TUdj=cgdE=eM3t zeVd$`Z#CHUUWJ$_DXT$sE6xlIM^a1$wj{V)&tfIL%Mn~m(z+Z)M)wjF9~uBHx=bn{ zD$jRw{O8g{Re2`b-l8^utlYz%9)rs+2Cb%a4+6Mo10?$8Z{c{411qG$h}0HlK-C}= z0s6i?ll|r6c-QK29A~)5=w2wh_dn3Ia=`{k?#Ft1zxoy|app)a47Jq(>;jaUabE9~ z8@Lz=tz*wdj!W`0w_axfps_kp)K_#N>BhCb*dj@m-0?hqgoxa0_sP+}{JR?fC}^V9 zbxjJ`Fn|ODBHjZ<+KM=LqMy2P`Gd;*CziO=?%EODxb)r@V=HwRRMIqXtR@VQraPA+ z!ilL3a9gjD0AMv4=>8LE*gr`#*-h{@|!^yZ1W<({8Pp0&wkjo!z=J-^B=6M|T-i zQ`w2y@i2^p2FFM%mqKl5t$l<{ZDr>lYk(F*jQGVq&y>GCE9Y$J=S-PD!5w+)Vmmwc zn5%ki*$@Jn+N20G*lR#BC3w?E52#93-~HIV<@#2r8iD+$`$&ILE`a^*9ZUxGG(4u__I}%9k ze3lc^%Bb}C$Z(ztkfN{g0@xC*)! 
zbpQw=UICxjbB)S*H7V^R6}HiDh)~@n{gGCE4z_PwoM*pe?TBFU)qI8TGzWQIU+8HXZ@M z9eu}g49H(67C!q95u&%N6E>4Q<&7}_)~HvGsRFPCz?M)&$pUxoLmw#L?6@AjX08mW z>Cgg|al#pxze8s9C_^w>^B4kTD4_N4Pi$QK_la#u2G=i|Kx^N9m^=!&g#`ho4epdx zPD4ho0sxVdw}In47TI>x`icS~20r=I3!&pX2mQbRtKOL+Dl3vHP60Fx&@_;y0Ro_I zxy6(-Xg?cXNdnP*Lx3cp*o`@D@jRWcDCexcC&}hksX&em+m{p~00{l#*_q8xFlnTBauUJ733{Mo38C)BJGvS%p-bwRb001}e zonpWK(8ubw)#F>u0zh~cNbgoI2jBKN6z3#C(NR-9RP0$fUa+e~i!uR@1=n&RXMFZE z${W3zrVKr-oH?Ew9RybfaYu0sz#TaIU!W z1kGG?qbI$)zj@g@wsR1_GIo<1s7_{In}H~3f^C3x0!aor0QnTKs>fh6xGn@|#4YQ^ zv=8s{cvFMck{Qp^qL*G52{G>#5j^SfIx?$wArjNe(O92C=Z;>Z*g+MZ7-vZrvcU#F zJ^#O2Dwuh2?F~HrzSVktO&lk*o!L3ub^_4|iIso}CqBLjFMc%E7(8Zk+ii2j8NEsO8+2FKSIVnVA)0#;Q?zuWL>KfL&J?Z_gUHQropPU!_eW zWL*e$PVO-=dG2H_v2)!I-jA9J3-ZM0e|d%uZWv3|RmnmCq>FID0RYewhdbSb8;y${L} Date: Sat, 20 Feb 2016 15:47:01 -0800 Subject: [PATCH 04/12] Plex: Always return True when getting auth token if username/password aren't configured This was causing failures to update Plex server when no username and password were specified since we were failing to get the auth token --- sickbeard/__init__.py | 5 +---- sickbeard/notifiers/plex.py | 3 ++- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/sickbeard/__init__.py b/sickbeard/__init__.py index 58dea8110f..f871f27083 100644 --- a/sickbeard/__init__.py +++ b/sickbeard/__init__.py @@ -351,7 +351,6 @@ PLEX_CLIENT_HOST = None PLEX_SERVER_USERNAME = None PLEX_SERVER_PASSWORD = None -PLEX_SERVER_NO_AUTH = False USE_PLEX_CLIENT = False PLEX_CLIENT_USERNAME = None @@ -601,7 +600,7 @@ def initialize(consoleLogging=True): # pylint: disable=too-many-locals, too-man USE_KODI, KODI_ALWAYS_ON, KODI_NOTIFY_ONSNATCH, KODI_NOTIFY_ONDOWNLOAD, KODI_NOTIFY_ONSUBTITLEDOWNLOAD, KODI_UPDATE_FULL, KODI_UPDATE_ONLYFIRST, \ KODI_UPDATE_LIBRARY, KODI_HOST, KODI_USERNAME, KODI_PASSWORD, BACKLOG_FREQUENCY, \ USE_TRAKT, TRAKT_USERNAME, TRAKT_ACCESS_TOKEN, TRAKT_REFRESH_TOKEN, TRAKT_REMOVE_WATCHLIST, TRAKT_SYNC_WATCHLIST, TRAKT_REMOVE_SHOW_FROM_SICKRAGE, TRAKT_METHOD_ADD, TRAKT_START_PAUSED, traktCheckerScheduler, TRAKT_USE_RECOMMENDED, TRAKT_SYNC, TRAKT_SYNC_REMOVE, TRAKT_DEFAULT_INDEXER, TRAKT_REMOVE_SERIESLIST, TRAKT_TIMEOUT, TRAKT_BLACKLIST_NAME, \ - USE_PLEX_SERVER, PLEX_SERVER_NO_AUTH, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, USE_PLEX_CLIENT, PLEX_CLIENT_USERNAME, PLEX_CLIENT_PASSWORD, \ + USE_PLEX_SERVER, PLEX_NOTIFY_ONSNATCH, PLEX_NOTIFY_ONDOWNLOAD, PLEX_NOTIFY_ONSUBTITLEDOWNLOAD, PLEX_UPDATE_LIBRARY, USE_PLEX_CLIENT, PLEX_CLIENT_USERNAME, PLEX_CLIENT_PASSWORD, \ PLEX_SERVER_HOST, PLEX_SERVER_TOKEN, PLEX_CLIENT_HOST, PLEX_SERVER_USERNAME, PLEX_SERVER_PASSWORD, PLEX_SERVER_HTTPS, MIN_BACKLOG_FREQUENCY, SKIP_REMOVED_FILES, ALLOWED_EXTENSIONS, \ USE_EMBY, EMBY_HOST, EMBY_APIKEY, \ showUpdateScheduler, __INITIALIZED__, INDEXER_DEFAULT_LANGUAGE, EP_DEFAULT_DELETED_STATUS, LAUNCH_BROWSER, TRASH_REMOVE_SHOW, TRASH_ROTATE_LOGS, SORT_ARTICLE, \ @@ -1024,7 +1023,6 @@ def path_leaf(path): PLEX_CLIENT_USERNAME = check_setting_str(CFG, 'Plex', 'plex_client_username', '', censor_log=True) PLEX_CLIENT_PASSWORD = check_setting_str(CFG, 'Plex', 'plex_client_password', '', censor_log=True) PLEX_SERVER_HTTPS = bool(check_setting_int(CFG, 'Plex', 'plex_server_https', 0)) - PLEX_SERVER_NO_AUTH = bool(check_setting_int(CFG, 'Plex', 'plex_server_no_auth', 0)) USE_EMBY = bool(check_setting_int(CFG, 'Emby', 'use_emby', 0)) EMBY_HOST = check_setting_str(CFG, 'Emby', 'emby_host', '') @@ -1936,7 +1934,6 @@ def save_config(): # pylint: 
disable=too-many-statements, too-many-branches new_config['Plex']['plex_client_host'] = PLEX_CLIENT_HOST new_config['Plex']['plex_server_username'] = PLEX_SERVER_USERNAME new_config['Plex']['plex_server_password'] = helpers.encrypt(PLEX_SERVER_PASSWORD, ENCRYPTION_VERSION) - new_config['Plex']['plex_server_no_auth'] = int(PLEX_SERVER_NO_AUTH) new_config['Plex']['use_plex_client'] = int(USE_PLEX_CLIENT) new_config['Plex']['plex_client_username'] = PLEX_CLIENT_USERNAME diff --git a/sickbeard/notifiers/plex.py b/sickbeard/notifiers/plex.py index ea71d5a6ac..0c26a41606 100644 --- a/sickbeard/notifiers/plex.py +++ b/sickbeard/notifiers/plex.py @@ -130,6 +130,7 @@ def update_library(self, ep_obj=None, host=None, # pylint: disable=too-many-arg return False if not self.get_token(username, password, plex_server_token): + logger.log(u'PLEX: Error getting auth token for Plex Media Server, check your settings', logger.WARNING) return False file_location = '' if not ep_obj else ep_obj.location @@ -220,7 +221,7 @@ def get_token(self, username=None, password=None, plex_server_token=None): return True if not (username and password): - return sickbeard.PLEX_SERVER_NO_AUTH + return True logger.log(u'PLEX: fetching plex.tv credentials for user: ' + username, logger.DEBUG) From dde73eaa9d4e83128519583bc52d2d77c08c4545 Mon Sep 17 00:00:00 2001 From: miigotu Date: Mon, 22 Feb 2016 11:30:49 -0800 Subject: [PATCH 05/12] Use a decorator to prevent processing a dir more than once at a time, regardless of caller Fixes https://github.com/SickRage/sickrage-issues/issues/1028 --- sickbeard/processTV.py | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/sickbeard/processTV.py b/sickbeard/processTV.py index 142bcd7efc..6a0327123f 100644 --- a/sickbeard/processTV.py +++ b/sickbeard/processTV.py @@ -20,6 +20,7 @@ import os import stat +from functools import wraps import sickbeard from sickbeard import postProcessor @@ -142,7 +143,25 @@ def logHelper(logMessage, logLevel=logger.INFO): return logMessage + u"\n" +def OneRunPP(): + isRunning = [False] + + def decorate(func): + @wraps(func) + def func_wrapper(*args, **kargs): + if isRunning[0]: + return logHelper(u'Post processor is already running', logger.ERROR) + + isRunning[0] = True + ret = func(*args, **kargs) + isRunning[0] = False + return ret + return func_wrapper + return decorate + + # pylint: disable=too-many-arguments,too-many-branches,too-many-statements,too-many-locals +@OneRunPP() def processDir(dirName, nzbName=None, process_method=None, force=False, is_priority=None, delete_on=False, failed=False, proc_type="auto"): """ Scans through the files in dirName and processes whatever media files it finds @@ -633,7 +652,7 @@ def subtitles_enabled(video): :param video: video filename to be parsed """ - + try: parse_result = NameParser().parse(video, cache_result=True) except (InvalidNameException, InvalidShowException): From 72b9c0be41b8b3edbf5aa61be02a1b25576b62dc Mon Sep 17 00:00:00 2001 From: miigotu Date: Tue, 23 Feb 2016 11:03:23 -0800 Subject: [PATCH 06/12] Fixes https://github.com/SickRage/sickrage-issues/issues/1046 --- sickbeard/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/helpers.py b/sickbeard/helpers.py index 1b3f54ed80..3acfbe352a 100644 --- a/sickbeard/helpers.py +++ b/sickbeard/helpers.py @@ -353,7 +353,7 @@ def copyFile(srcFile, destFile): try: ek(shutil.copyfile, srcFile, destFile) except (SpecialFileError, Error) as error: - logger.log(error, logger.WARNING) 
+ logger.log(u'{}'.format(error), logger.WARNING) except Exception as error: logger.log(u'{}'.format(error), logger.ERROR) else: From 28a2af78ed3bdb3d94ade88dea06b6c23e5e672b Mon Sep 17 00:00:00 2001 From: Thor Jacobsen Date: Tue, 23 Feb 2016 22:06:12 +0100 Subject: [PATCH 07/12] fix: Danishbits provider now works again Aparently, BS4 `class_` lookup does not work when element has more than one class? Anywho, replaced the lookup with `id` --- sickbeard/providers/danishbits.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sickbeard/providers/danishbits.py b/sickbeard/providers/danishbits.py index d9d3c0bb4e..8b07e72d18 100644 --- a/sickbeard/providers/danishbits.py +++ b/sickbeard/providers/danishbits.py @@ -126,7 +126,7 @@ def process_column_header(td): continue with BS4Parser(data, 'html5lib') as html: - torrent_table = html.find('table', class_='torrent_table') + torrent_table = html.find('table', id='torrent_table') torrent_rows = torrent_table.find_all('tr') if torrent_table else [] # Continue only if at least one Release is found From f432b0a4d45d4a8cf38c0ff0e4fb0c21d0ac3d08 Mon Sep 17 00:00:00 2001 From: miigotu Date: Wed, 24 Feb 2016 23:30:23 -0800 Subject: [PATCH 08/12] Gui had wrong replacement chars for scene numbers in renamer --- gui/slick/views/config_postProcessing.mako | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/gui/slick/views/config_postProcessing.mako b/gui/slick/views/config_postProcessing.mako index 28cf077369..050b586cb5 100644 --- a/gui/slick/views/config_postProcessing.mako +++ b/gui/slick/views/config_postProcessing.mako @@ -321,7 +321,7 @@   - %0XMS + %0XS 02 @@ -341,7 +341,7 @@   - %0XME + %0XE 03 @@ -919,12 +919,12 @@ XEM Season Number: - %XMS + %XS 2   - %0XMS + %0XS 02 @@ -944,7 +944,7 @@   - %0XME + %0XE 03 From 68720a5f6efee84f1b581b1eba650cd73e50cd53 Mon Sep 17 00:00:00 2001 From: miigotu Date: Thu, 25 Feb 2016 00:48:37 -0800 Subject: [PATCH 09/12] Disable show update on start until it can be improved --- SickBeard.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/SickBeard.py b/SickBeard.py index b0f4a2a1f4..c997af094d 100755 --- a/SickBeard.py +++ b/SickBeard.py @@ -368,7 +368,7 @@ def start(self): # pylint: disable=too-many-branches,too-many-statements failed_history.trimHistory() # Check for metadata indexer updates for shows (sets the next aired ep!) 
- sickbeard.showUpdateScheduler.forceRun() + # sickbeard.showUpdateScheduler.forceRun() # Launch browser if sickbeard.LAUNCH_BROWSER and not (self.no_launch or self.run_as_daemon): From 93c88caf49fd3748beb0c84a8d10256ddeb6f55a Mon Sep 17 00:00:00 2001 From: miigotu Date: Thu, 25 Feb 2016 00:51:22 -0800 Subject: [PATCH 10/12] Missed a few replace-map typos --- gui/slick/views/config_postProcessing.mako | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gui/slick/views/config_postProcessing.mako b/gui/slick/views/config_postProcessing.mako index 050b586cb5..46bf5b0a3b 100644 --- a/gui/slick/views/config_postProcessing.mako +++ b/gui/slick/views/config_postProcessing.mako @@ -316,7 +316,7 @@ XEM Season Number: - %XMS + %XS 2 @@ -336,7 +336,7 @@ XEM Episode Number: - %XME + %XE 3 @@ -939,7 +939,7 @@ XEM Episode Number: - %XME + %XE 3 From 0dadb35b5261b82b97942b7c91b3cae351266619 Mon Sep 17 00:00:00 2001 From: neoatomic Date: Thu, 25 Feb 2016 14:38:22 +0100 Subject: [PATCH 11/12] Added provider icons for french-adn.com & torrentshack.me --- gui/slick/images/providers/french-adn.png | Bin 0 -> 588 bytes gui/slick/images/providers/french-adn_com.png | Bin 0 -> 588 bytes gui/slick/images/providers/torrentshack.png | Bin 0 -> 630 bytes gui/slick/images/providers/torrentshack_me.png | Bin 0 -> 630 bytes 4 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 gui/slick/images/providers/french-adn.png create mode 100644 gui/slick/images/providers/french-adn_com.png create mode 100644 gui/slick/images/providers/torrentshack.png create mode 100644 gui/slick/images/providers/torrentshack_me.png diff --git a/gui/slick/images/providers/french-adn.png b/gui/slick/images/providers/french-adn.png new file mode 100644 index 0000000000000000000000000000000000000000..521edbf50a6516c85e7f0f8fb51896927f116311 GIT binary patch literal 588 zcmV-S0<-;zP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;(#7eog=Y0li5?K~y+TbyCYt!cY*szu-a=14W_@wuGjl zlD?r&pe+apw1EYP2_$NOKXC8P4{+ncm2qXFet;k5!4g|dxS39-bLY&QnYpB1uPcgD zsZ^@fsw7F!hGD2b2TpjPBafzOx~>y^@Z#S!-TTq#VL03(;hm+aqPVGm#JxuUbG7=~ zDmTxTAfKKP$8ls?M!Al2@$o+>Dc4U@fj`OgF-L3Uvz-+Ws#jzw^D#}zU_Rgr?Y&J{NG|RHZVv%7Oj^mJ;U;=0wdf*3fg=weFv$iNU zb-iQTq3@lWdPSDkQ^I^AN^QHly@%y%;CMU_E*na8cLHAefk@2>^7@ z?Z*`sx9{`$TP(9jUoaU4%-56zK>(&@c4ENTJKoz#rBdm1I+Mvz6qU>6(E7S*>;a1d zwC@cPQ}SgxgRzv$W#szp6GVt@+YzO{n*dCFEx%tXm2id0AOZTLQ5#xwwgM6b^A6rEfGE;EeYMo4BqE0BT`z2|yD7BnAL%5d#Cq aFpO^$tOnq~Am_{g0000Px#1ZP1_K>z@;j|==^1poj532;bRa{vGi!vFvd!vV){sAK>D02p*dSaefwW^{L9 za%BK;VQFr3E^cLXAT%y8E;(#7eog=Y0li5?K~y+TbyCYt!cY*szu-a=14W_@wuGjl zlD?r&pe+apw1EYP2_$NOKXC8P4{+ncm2qXFet;k5!4g|dxS39-bLY&QnYpB1uPcgD zsZ^@fsw7F!hGD2b2TpjPBafzOx~>y^@Z#S!-TTq#VL03(;hm+aqPVGm#JxuUbG7=~ zDmTxTAfKKP$8ls?M!Al2@$o+>Dc4U@fj`OgF-L3Uvz-+Ws#jzw^D#}zU_Rgr?Y&J{NG|RHZVv%7Oj^mJ;U;=0wdf*3fg=weFv$iNU zb-iQTq3@lWdPSDkQ^I^AN^QHly@%y%;CMU_E*na8cLHAefk@2>^7@ z?Z*`sx9{`$TP(9jUoaU4%-56zK>(&@c4ENTJKoz#rBdm1I+Mvz6qU>6(E7S*>;a1d zwC@cPQ}SgxgRzv$W#szp6GVt@+YzO{n*dCFEx%tXm2id0AOZTLQ5#xwwgM6b^A6rEfGE;EeYMo4BqE0BT`z2|yD7BnAL%5d#Cq aFpO^$tOnq~Am_{g0000JP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGjU;qFQU;%n~MU4Oe00(qQO+^RZ1sVV> z04kM)+5i9m7<5HgbW?9;ba!ELWdLwtX>N2bZe?^JG%heMIczh2P5=M_)Ja4^R5(v9 zl4)|{Fc5_`$s7i}%v4!2|H9%WCxe`ZU}MRE&61hF6T+G1YbjNYY0K~Rd);a^&C6s~ zBxaT7MVzlIJHFJ+!OwB9j0H%2jEi!W@ zf4WHWDw51zuD=EA5_Ux7Yoor_KR!1SA;*ZK zf!*;YHtfk;5;NpzKGuMqiThf8Zl0m}Ddi%xwNiNR2fFuCyTj0QVh`N??J3EY%rMfG zV!$$cH;N5AG9^EsvskwyZgKK&?eAC5;iz!UQvMrs!XEstyTBxWY@YDLKs{$EJp=+H 
z7eqe<0(^gi0}7ALPi7+oQ?eQh{Mq9laJUCb8}9VCJ$`hDkFCAl-xQW8n&wNnX<4LM zfff;EFPzG5vZO3RR{UZqvYK4(Tfc|`Kd)7XJP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGjU;qFQU;%n~MU4Oe00(qQO+^RZ1sVV> z04kM)+5i9m7<5HgbW?9;ba!ELWdLwtX>N2bZe?^JG%heMIczh2P5=M_)Ja4^R5(v9 zl4)|{Fc5_`$s7i}%v4!2|H9%WCxe`ZU}MRE&61hF6T+G1YbjNYY0K~Rd);a^&C6s~ zBxaT7MVzlIJHFJ+!OwB9j0H%2jEi!W@ zf4WHWDw51zuD=EA5_Ux7Yoor_KR!1SA;*ZK zf!*;YHtfk;5;NpzKGuMqiThf8Zl0m}Ddi%xwNiNR2fFuCyTj0QVh`N??J3EY%rMfG zV!$$cH;N5AG9^EsvskwyZgKK&?eAC5;iz!UQvMrs!XEstyTBxWY@YDLKs{$EJp=+H z7eqe<0(^gi0}7ALPi7+oQ?eQh{Mq9laJUCb8}9VCJ$`hDkFCAl-xQW8n&wNnX<4LM zfff;EFPzG5vZO3RR{UZqvYK4(Tfc|`Kd)7X Date: Thu, 25 Feb 2016 18:45:38 -0800 Subject: [PATCH 12/12] 50 Threads? Really? This fix halves VIRT alloc, to halve it again add export MALLOC_ARENA_MAX=2 (or 1) to your runscript on linux) --- sickbeard/webserve.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/sickbeard/webserve.py b/sickbeard/webserve.py index 16c552611c..8086c830e6 100644 --- a/sickbeard/webserve.py +++ b/sickbeard/webserve.py @@ -88,8 +88,12 @@ from tornado.web import RequestHandler, HTTPError, authenticated from tornado.gen import coroutine from tornado.ioloop import IOLoop -from tornado.concurrent import run_on_executor + from concurrent.futures import ThreadPoolExecutor +from tornado.process import cpu_count + +from tornado.concurrent import run_on_executor + from mako.runtime import UNDEFINED mako_lookup = None @@ -255,7 +259,7 @@ def __init__(self, *args, **kwargs): super(WebHandler, self).__init__(*args, **kwargs) self.io_loop = IOLoop.current() - executor = ThreadPoolExecutor(50) + executor = ThreadPoolExecutor(cpu_count()) @authenticated @coroutine
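Illustrative sketch (not part of the patch series): the webserve.py change in PATCH 12 swaps a fixed pool of 50 worker threads for one sized by tornado.process.cpu_count(). The pattern it relies on is a class-level ThreadPoolExecutor shared by the handler, with blocking work dispatched via run_on_executor and awaited from a coroutine. The handler and method names below are invented for the example; the imports mirror the ones added in the patch. Per the commit message, exporting MALLOC_ARENA_MAX=2 (or 1) before launch on Linux trims glibc malloc arenas further.

    from concurrent.futures import ThreadPoolExecutor

    from tornado.concurrent import run_on_executor
    from tornado.gen import coroutine
    from tornado.process import cpu_count
    from tornado.web import RequestHandler


    class ExampleHandler(RequestHandler):
        # One bounded pool shared by every instance of the handler class.
        # Each idle worker reserves its own thread stack, so capping the pool
        # at cpu_count() instead of 50 shrinks the process's virtual allocation.
        executor = ThreadPoolExecutor(cpu_count())

        @run_on_executor
        def blocking_work(self):
            # Stand-in for work that would otherwise block the IOLoop
            return 'done'

        @coroutine
        def get(self):
            result = yield self.blocking_work()
            self.write(result)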