Start to vueify /history #9201

Merged · 53 commits into develop from feature/vueify-history · Mar 21, 2021
Commits
6490cad
Start to vueify /history
p0psicles Feb 11, 2021
51491cf
Merge remote-tracking branch 'origin/develop' into feature/vueify-his…
p0psicles Feb 13, 2021
eddbaf2
Update history api.
p0psicles Feb 13, 2021
ac7bbbb
Fix history-compact.vue and history-detailed.vue components.
p0psicles Feb 13, 2021
c621e82
Moved checkHistory to history module.
p0psicles Feb 13, 2021
ba0360a
Fix filtering the history table.
p0psicles Feb 13, 2021
71ea7ef
Remove history-new.vue
p0psicles Feb 14, 2021
3507ebe
Fix loading history records
p0psicles Feb 14, 2021
94a4ae5
Error handling when localStorage is full
p0psicles Feb 14, 2021
7deb076
build bundles
p0psicles Feb 14, 2021
b01e928
Add pagination to compact
p0psicles Feb 14, 2021
0a1660d
Merge remote-tracking branch 'origin/develop' into feature/vueify-his…
p0psicles Feb 15, 2021
ae21170
build runtime
p0psicles Feb 15, 2021
75483db
Merge remote-tracking branch 'origin/develop' into feature/vueify-his…
p0psicles Feb 16, 2021
18dee6d
Add vue-good-table on-last-page event.
p0psicles Feb 18, 2021
3921209
Move to next page after we got new history
p0psicles Feb 19, 2021
e6546c5
Switch over to use vgt remote mode.
p0psicles Feb 21, 2021
4ab27e5
Implement history sorting. (only a select number of fields)
p0psicles Feb 23, 2021
7393fca
snakecase.
p0psicles Feb 24, 2021
eba3fd9
Merge remote-tracking branch 'origin/develop' into feature/vueify-his…
p0psicles Mar 1, 2021
3e186ec
Add basic filter.
p0psicles Mar 1, 2021
04c2a36
Check for filter value if empty
p0psicles Mar 1, 2021
834ecc2
Added filters for other columns.
p0psicles Mar 14, 2021
e0900c4
Update comments
p0psicles Mar 14, 2021
384676f
prevent keyerror
p0psicles Mar 14, 2021
78226ee
Update history api, with filters
p0psicles Mar 14, 2021
726a7af
Remove invalidSizeMessage
p0psicles Mar 14, 2021
a375ea7
Update styling for dark/light
p0psicles Mar 14, 2021
6cfbeb4
This can be episodes. Is not used icw vue-good-table anyway.
p0psicles Mar 15, 2021
6b69470
Add rowStyleClass (snatched,downloaded, ...) colors.
p0psicles Mar 15, 2021
b0013cc
Typo
p0psicles Mar 15, 2021
3a8264e
Fixed compact mode component.
p0psicles Mar 15, 2021
863c96b
Fixed bug in show-header component.
p0psicles Mar 15, 2021
d795c4d
Part of the history compact mode fixes.
p0psicles Mar 15, 2021
f006f57
Remove INITIALIZE_HISTORY_STORE.
p0psicles Mar 15, 2021
6c625b3
Fixed styling.
p0psicles Mar 15, 2021
d90cd99
Bump vue-good-table version (commit)
p0psicles Mar 15, 2021
a15a1ba
Fix KeyError
p0psicles Mar 15, 2021
583d960
Align inputs
p0psicles Mar 15, 2021
7be1968
Fix save per-page pagination value in cookie
p0psicles Mar 16, 2021
2f210da
lint and lint-css
p0psicles Mar 16, 2021
a4b3ac7
Provide subtitle provider
p0psicles Mar 18, 2021
5d79c2e
Fixed sorting
p0psicles Mar 18, 2021
e0d3232
history-compact: Fix saving sort in cookie
p0psicles Mar 21, 2021
44a4a00
lint
p0psicles Mar 21, 2021
4f735b0
Add break-word to tooltips
p0psicles Mar 21, 2021
67c8b92
Rename to `Missing Show`
p0psicles Mar 21, 2021
4ece168
Merge remote-tracking branch 'remotes/origin/develop' into feature/vu…
p0psicles Mar 21, 2021
da19e98
Fix test
p0psicles Mar 21, 2021
1fd7d67
Fix flake
p0psicles Mar 21, 2021
df3219e
yarn dev
p0psicles Mar 21, 2021
c5580d9
Remove unused imports
p0psicles Mar 21, 2021
5e02ea0
Merge branch 'feature/vueify-history' of https://github.com/pymedusa/…
p0psicles Mar 21, 2021
25 changes: 23 additions & 2 deletions medusa/history.py
@@ -19,9 +19,10 @@
from __future__ import unicode_literals

import datetime
from os.path import basename

from medusa import db
from medusa.common import FAILED, SNATCHED, SUBTITLED
from medusa import db, ws
from medusa.common import FAILED, SNATCHED, SUBTITLED, statusStrings
from medusa.schedulers.download_handler import ClientStatusEnum as ClientStatus
from medusa.show.history import History

@@ -80,6 +81,26 @@ def _log_history_item(action, ep_obj, resource=None, provider=None, proper_tags=
version, proper_tags, manually_searched, info_hash, size,
provider_type, client_status, part_of_batch])

# Update the history page in frontend.
ws.Message('historyUpdate', {
'status': action,
'statusName': statusStrings.get(action),
'actionDate': log_date,
'quality': ep_obj.quality,
'resource': basename(resource),
'size': size,
'properTags': proper_tags,
'season': ep_obj.season,
'episode': ep_obj.episode,
'manuallySearched': manually_searched,
'infoHash': info_hash,
'provider': provider,
'providerType': provider_type,
'clientStatus': client_status,
'partOfBatch': part_of_batch
}).push()


def log_snatch(search_result):
"""
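Context note (not part of the diff): the new ws.Message('historyUpdate', ...).push() call broadcasts every logged history row over Medusa's websocket so the vueified /history page can append it without re-polling the API. Below is a minimal sketch of that broadcast, using only the ws.Message(event, payload).push() call and the fields visible in the hunk above; the helper name and the None guard on resource are illustrative additions, not code from the PR.

from os.path import basename

from medusa import ws
from medusa.common import statusStrings


def push_history_update(action, ep_obj, log_date, resource, size, **extra):
    """Hypothetical helper: broadcast one history row to connected web clients."""
    payload = {
        'status': action,
        'statusName': statusStrings.get(action),
        'actionDate': log_date,
        'quality': ep_obj.quality,
        # Guarding against a missing resource is an assumption, not PR behaviour.
        'resource': basename(resource) if resource else None,
        'size': size,
        'season': ep_obj.season,
        'episode': ep_obj.episode,
    }
    payload.update(extra)  # provider, infoHash, properTags, clientStatus, ...
    ws.Message('historyUpdate', payload).push()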
6 changes: 3 additions & 3 deletions medusa/server/api/v2/base.py
@@ -327,14 +327,14 @@ def _get_limit(self, default=20, maximum=1000):
except ValueError:
self._raise_bad_request_error('Invalid limit parameter')

def _paginate(self, data=None, data_generator=None, sort=None):
def _paginate(self, data=None, data_generator=None, sort=None, headers={}):
arg_page = self._get_page()
arg_limit = self._get_limit()

headers = {
headers.update({
'X-Pagination-Page': arg_page,
'X-Pagination-Limit': arg_limit
}
})

first_page = arg_page if arg_page > 0 else 1
previous_page = None if arg_page <= 1 else arg_page - 1
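Aside (not part of the PR): the new headers={} parameter lets a handler pass extra response headers that are merged with the pagination headers. Because a mutable default argument is shared across calls in Python, the sketch below shows the same merge written with a None default; the function name is illustrative.

def merge_pagination_headers(page, limit, headers=None):
    """Return caller-supplied headers merged with the pagination headers (sketch)."""
    merged = dict(headers or {})  # fresh dict per call; avoids the shared mutable default
    merged['X-Pagination-Page'] = page
    merged['X-Pagination-Limit'] = limit
    return merged

# Example:
# merge_pagination_headers(1, 50, {'X-Pagination-Count': 120})
# -> {'X-Pagination-Count': 120, 'X-Pagination-Page': 1, 'X-Pagination-Limit': 50}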
5 changes: 5 additions & 0 deletions medusa/server/api/v2/config.py
@@ -29,6 +29,7 @@
generate_show_queue,
)
from medusa.sbdatetime import date_presets, time_presets
from medusa.schedulers.download_handler import status_strings
from medusa.schedulers.utils import generate_schedulers
from medusa.server.api.v2.base import (
BaseRequestHandler,
@@ -774,6 +775,10 @@ def make_quality(value, name, key=None):
)
]

section_data['clientStatuses'] = [
{'value': k.value, 'name': v} for k, v in status_strings.items()
]

# Save it for next time
cls._generated_data_consts = section_data

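Illustration (not from the PR): the clientStatuses entry exposes the download handler's status enum to the frontend as value/name pairs for the new history filters. With a stand-in enum and mapping (the real members live in medusa.schedulers.download_handler), the comprehension above produces:

from enum import Enum


class ClientStatusEnum(Enum):
    # Stand-in members; the real enum lives in medusa.schedulers.download_handler.
    SNATCHED = 1
    DOWNLOADED = 2


status_strings = {
    ClientStatusEnum.SNATCHED: 'Snatched',
    ClientStatusEnum.DOWNLOADED: 'Downloaded',
}

client_statuses = [{'value': k.value, 'name': v} for k, v in status_strings.items()]
# -> [{'value': 1, 'name': 'Snatched'}, {'value': 2, 'name': 'Downloaded'}]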
187 changes: 177 additions & 10 deletions medusa/server/api/v2/history.py
@@ -2,14 +2,16 @@
"""Request handler for alias (scene exceptions)."""
from __future__ import unicode_literals

import json
from os.path import basename

from medusa import db
from medusa.common import DOWNLOADED, FAILED, SNATCHED, SUBTITLED, statusStrings
from medusa.indexers.utils import indexer_id_to_name
from medusa.providers.generic_provider import GenericProvider
from medusa.schedulers.download_handler import ClientStatus
from medusa.server.api.v2.base import BaseRequestHandler
from medusa.tv.series import SeriesIdentifier
from medusa.tv.series import Series, SeriesIdentifier


class HistoryHandler(BaseRequestHandler):
@@ -41,18 +43,99 @@ def get(self, series_slug, path_param):

arg_page = self._get_page()
arg_limit = self._get_limit(default=50)
compact_layout = bool(self.get_argument('compact', default=False))
return_last = bool(self.get_argument('last', default=False))
total_rows = self.get_argument('total', default=None)
sort = [json.loads(item) for item in self.get_arguments('sort[]')]
filter = json.loads(self.get_argument('filter')) if self.get_arguments('filter') else None

headers = {}

if return_last:
# Return the last history row
results = db.DBConnection().select('select * from history ORDER BY date DESC LIMIT 1')
if not results:
return self._not_found('History data not found')
return self._ok(data=results[0])

where = []

if series_slug is not None:
series_identifier = SeriesIdentifier.from_slug(series_slug)
if not series_identifier:
return self._bad_request('Invalid series')

sql_base += ' WHERE indexer_id = ? AND showid = ?'
where += ['indexer_id', 'showid']
params += [series_identifier.indexer.id, series_identifier.id]

sql_base += ' ORDER BY date DESC'
field_map = {
'actiondate': 'date',
'date': 'date',
'action': 'action',
'statusname': 'action',
'provider.id': 'provider',
'clientstatus': 'client_status',
'size': 'size',
'quality': 'quality'
}

# Prepare an operator (> or <) and size, for the size query.
size_operator = None
size = None
provider = None

if filter is not None and filter.get('columnFilters'):
size = filter['columnFilters'].pop('size', None)
provider = filter['columnFilters'].pop('provider.id', None)

if size:
size_operator, size = size.split(' ')

for filter_field, filter_value in filter['columnFilters'].items():
# Loop through each column filter apply the mapping, and add to sql_base.
filter_field = field_map.get(filter_field.lower())
if not filter_field or not filter_value:
continue
where += [filter_field]
params += [filter_value]

if where:
sql_base += ' WHERE ' + ' AND '.join(f'{item} = ?' for item in where)

# Add size query (with operator)
if size_operator and size:
sql_base += f' {"AND" if where else "WHERE"} size {size_operator} ?'
params.append(int(size) * 1024 * 1024)

# Add provider with like %provider%
if provider:
sql_base += f' {"AND" if where else "WHERE"} provider LIKE ?'
params.append(f'%%{provider}%%')

if sort is not None and len(sort) == 1: # Only support one sort column right now.
field = sort[0].get('field').lower()
order = sort[0].get('type')
if field_map.get(field):
sql_base += f' ORDER BY {field_map[field]} {order} '

if total_rows:
sql_base += ' LIMIT ?'
params += [total_rows]

results = db.DBConnection().select(sql_base, params)

if compact_layout:
from collections import OrderedDict
res = OrderedDict()

for item in results:
if item.get('showid') and item.get('season') and item.get('episode') and item.get('indexer_id'):
item['showslug'] = f"{indexer_id_to_name(item['indexer_id'])}{item['showid']}"
my_key = f"{item['showslug']}S{item['season']}E{item['episode']}"
res.setdefault(my_key, []).append(item)
results = res
headers['X-Pagination-Count'] = len(results)

def data_generator():
"""Read and paginate history records."""
start = arg_limit * (arg_page - 1)
@@ -65,6 +148,8 @@ def data_generator():
subtitle_language = None
show_slug = None
client_status = None
show_slug = None
show_title = 'Missing Show'

if item['action'] in (SNATCHED, FAILED):
provider.update({
@@ -79,9 +164,7 @@

if item['action'] == SUBTITLED:
subtitle_language = item['resource']

if item['action'] == SUBTITLED:
subtitle_language = item['resource']
provider['name'] = item['provider']

if item['client_status'] is not None:
status = ClientStatus(status=item['client_status'])
@@ -91,7 +174,15 @@
}

if item['indexer_id'] and item['showid']:
show_slug = SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug
identifier = SeriesIdentifier.from_id(item['indexer_id'], item['showid'])
show_slug = identifier.slug
show = Series.find_by_identifier(identifier)
if show:
show_title = show.title

item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
show_title, item['season'], item['episode']
)

yield {
'id': item['rowid'],
@@ -105,22 +196,98 @@
'properTags': item['proper_tags'],
'season': item['season'],
'episode': item['episode'],
'episodeTitle': item['episodeTitle'],
'manuallySearched': bool(item['manually_searched']),
'infoHash': item['info_hash'],
'provider': provider,
'releaseName': release_name,
'releaseGroup': release_group,
'fileName': file_name,
'subtitleLanguage': subtitle_language,
'showSlug': show_slug,
'showTitle': show_title,
'providerType': item['provider_type'],
'clientStatus': client_status,
'partOfBatch': bool(item['part_of_batch'])
}

if not results:
return self._not_found('History data not found')
def data_generator_compact():
"""
Read and paginate history records.

Results are provided grouped per showid+season+episode.
The results are flattened into a structure of [{'actionDate': .., 'showSlug':.., 'rows':Array(history_items)},]
"""
start = arg_limit * (arg_page - 1)

for compact_item in list(results.values())[start:start + arg_limit]:
return_item = {'rows': []}
for item in compact_item:
provider = {}
release_group = None
release_name = None
file_name = None
subtitle_language = None

if item['action'] in (SNATCHED, FAILED):
provider.update({
'id': GenericProvider.make_id(item['provider']),
'name': item['provider']
})
release_name = item['resource']

if item['action'] == DOWNLOADED:
release_group = item['provider']
file_name = item['resource']

if item['action'] == SUBTITLED:
subtitle_language = item['resource']
provider['name'] = item['provider']

item['showSlug'] = None
item['showTitle'] = 'Missing Show'
if item['indexer_id'] and item['showid']:
identifier = SeriesIdentifier.from_id(item['indexer_id'], item['showid'])
item['showSlug'] = identifier.slug
show = Series.find_by_identifier(identifier)
if show:
item['showTitle'] = show.title

return_item['actionDate'] = item['date']
return_item['showSlug'] = item['showslug']
return_item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
item['showTitle'], item['season'], item['episode']
)
return_item['quality'] = item['quality']

return_item['rows'].append({
'actionDate': item['date'],
'id': item['rowid'],
'series': item['showSlug'],
'status': item['action'],
'statusName': statusStrings.get(item['action']),
'quality': item['quality'],
'resource': basename(item['resource']),
'size': item['size'],
'properTags': item['proper_tags'],
'season': item['season'],
'episode': item['episode'],
'manuallySearched': bool(item['manually_searched']),
'infoHash': item['info_hash'],
'provider': provider,
'releaseName': release_name,
'releaseGroup': release_group,
'fileName': file_name,
'subtitleLanguage': subtitle_language,
'showSlug': item['showslug'],
'showTitle': item['showTitle']
})
yield return_item

if compact_layout:
return self._paginate(data_generator=data_generator_compact, headers=headers)

return self._paginate(data_generator=data_generator)
return self._paginate(data_generator=data_generator, headers=headers)

def delete(self, identifier, **kwargs):
"""Delete a history record."""
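Aside (not part of the diff): the vue-good-table column filters arrive as JSON in the filter query argument; the size filter is a string such as '> 100' (megabytes) that the handler splits into an operator and a value, then compares against the byte count stored in the history table. A self-contained sketch of that translation, with an illustrative function name:

def build_size_clause(size_filter, has_where):
    """Turn an 'OP MB' filter such as '> 100' into an SQL fragment plus parameter."""
    operator, megabytes = size_filter.split(' ')
    if operator not in ('>', '<'):
        raise ValueError(f'unsupported operator: {operator}')
    keyword = 'AND' if has_where else 'WHERE'
    return f' {keyword} size {operator} ?', int(megabytes) * 1024 * 1024

# Example:
# build_size_clause('> 100', has_where=False)
# -> (' WHERE size > ?', 104857600)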
27 changes: 8 additions & 19 deletions medusa/server/web/core/history.py
@@ -2,8 +2,7 @@

from __future__ import unicode_literals

from medusa import app, ui
from medusa.helper.common import try_int
from medusa import ui
from medusa.server.web.core.base import PageTemplate, WebRoot
from medusa.show.history import History as HistoryTool

@@ -17,24 +16,14 @@ def __init__(self, *args, **kwargs):

self.history = HistoryTool()

def index(self, limit=None):
if limit is None:
if app.HISTORY_LIMIT:
limit = int(app.HISTORY_LIMIT)
else:
limit = 100
else:
limit = try_int(limit, 100)

app.HISTORY_LIMIT = limit

app.instance.save_config()

history = self.history.get(limit)
def index(self):
"""
Render the history page.

t = PageTemplate(rh=self, filename='history.mako')
return t.render(historyResults=history.detailed, compactResults=history.compact, limit=limit,
controller='history', action='index')
[Converted to VueRouter]
"""
t = PageTemplate(rh=self, filename='index.mako')
return t.render()

def clearHistory(self):
# @TODO: Replace this with DELETE /api/v2/history