Start to vueify /history (#9201)
* Start to vueify /history

* Update history api.

* Fix history-compact.vue and history-detailed.vue components.

* Moved checkHistory to history module.

* Fix filtering the history table.

* Remove history-new.vue
re-build runtime.

* Fix loading history records
 * indexer_id not set
 * show removed from library but records still in history.

* Error handling when localStorage is full
* Add flag to show while loading history

* build bundles

* Add pagination to compact

* build runtime

* Add vue-good-table on-last-page event.
* Utilize the last-page event to get a new page.

* Move to next page after we got new history

* Switch over to use vgt remote mode.

* Implement history sorting. (only a select number of fields)

* Convert to snake_case.
Start adding columnFilter support.

* Add basic filter.

* Check whether the filter value is empty

* Added filters for other columns.

* Update comments

* Prevent KeyError

* Update history api, with filters

* Remove invalidSizeMessage
I'm not using it

* Update styling for dark/light

* This can be episodes. It is not used in combination with vue-good-table anyway.

* Add rowStyleClass (snatched,downloaded, ...) colors.
* Cleanup comments

* Typo

* Fixed compact mode component.

* Fixed bug in show-header component.

* Part of the history compact mode fixes.
* Removed localStorage.

* Remove INITIALIZE_HISTORY_STORE.
* Not used

* Fixed styling.

* Bump vue-good-table version (commit)

* Fix KeyError

* Align inputs

* Fix saving the per-page pagination value in a cookie

* lint and lint-css

* Provide subtitle provider

* Fixed sorting
* Save sorting through cookie
* Save filter through cookie
* Link to show in episodeTitle
* Show subtitle provider

* history-compact: Fix saving sort in cookie

* lint

* Add break-word to tooltips

* Rename to `Missing Show`

* Fix test

* Fix flake

* yarn dev

* Remove unused imports
p0psicles authored Mar 21, 2021
1 parent a313b4d commit 108527f
Showing 37 changed files with 1,785 additions and 425 deletions.
25 changes: 23 additions & 2 deletions medusa/history.py
@@ -19,9 +19,10 @@
from __future__ import unicode_literals

import datetime
from os.path import basename

-from medusa import db
-from medusa.common import FAILED, SNATCHED, SUBTITLED
from medusa import db, ws
from medusa.common import FAILED, SNATCHED, SUBTITLED, statusStrings
from medusa.schedulers.download_handler import ClientStatusEnum as ClientStatus
from medusa.show.history import History

@@ -80,6 +81,26 @@ def _log_history_item(action, ep_obj, resource=None, provider=None, proper_tags=
version, proper_tags, manually_searched, info_hash, size,
provider_type, client_status, part_of_batch])

# Update the history page in frontend.
ws.Message('historyUpdate', {
'status': action,
'statusName': statusStrings.get(action),
'actionDate': log_date,
'quality': ep_obj.quality,
'resource': basename(resource),
'size': size,
'properTags': proper_tags,
'season': ep_obj.season,
'episode': ep_obj.episode,
'manuallySearched': manually_searched,
'infoHash': info_hash,
'provider': provider,
'providerType': provider_type,
'clientStatus': client_status,
'partOfBatch': part_of_batch
}).push()


def log_snatch(search_result):
"""
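The `ws.Message(...).push()` call added above is what lets the vueified history page update live instead of re-fetching. As a rough sketch of what one push looks like (all field values here are invented; `_log_history_item()` fills in the real ones):

# Minimal sketch of a 'historyUpdate' push; values are illustrative only.
from medusa import ws
from medusa.common import SNATCHED, statusStrings

ws.Message('historyUpdate', {
    'status': SNATCHED,
    'statusName': statusStrings.get(SNATCHED),
    'actionDate': 20210321120000,  # history date stamp
    'quality': 8,
    'resource': 'Some.Show.S02E05.720p.HDTV.x264-GROUP.mkv',
    'size': 1264975360,  # bytes
    'properTags': '',
    'season': 2,
    'episode': 5,
    'manuallySearched': False,
    'infoHash': None,
    'provider': 'some_provider',
    'providerType': 'torrent',
    'clientStatus': None,
    'partOfBatch': False
}).push()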
6 changes: 3 additions & 3 deletions medusa/server/api/v2/base.py
@@ -327,14 +327,14 @@ def _get_limit(self, default=20, maximum=1000):
except ValueError:
self._raise_bad_request_error('Invalid limit parameter')

-def _paginate(self, data=None, data_generator=None, sort=None):
def _paginate(self, data=None, data_generator=None, sort=None, headers={}):
arg_page = self._get_page()
arg_limit = self._get_limit()

-headers = {
headers.update({
'X-Pagination-Page': arg_page,
'X-Pagination-Limit': arg_limit
-}
})

first_page = arg_page if arg_page > 0 else 1
previous_page = None if arg_page <= 1 else arg_page - 1
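A side note on the new keyword argument: `headers={}` is a mutable default, so every call that omits `headers` shares one dict, which `headers.update(...)` then mutates. That is harmless here because `_paginate` only ever writes the same two pagination keys, but a defensive sketch of the same merge would be:

# Sketch of the same header merge with a None default instead of a shared dict.
# merge_pagination_headers is a hypothetical stand-alone helper, not part of the commit.
def merge_pagination_headers(headers=None, page=1, limit=50):
    headers = dict(headers or {})  # copy, so no caller or default dict is mutated
    headers.update({
        'X-Pagination-Page': page,
        'X-Pagination-Limit': limit
    })
    return headers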
5 changes: 5 additions & 0 deletions medusa/server/api/v2/config.py
@@ -29,6 +29,7 @@
generate_show_queue,
)
from medusa.sbdatetime import date_presets, time_presets
from medusa.schedulers.download_handler import status_strings
from medusa.schedulers.utils import generate_schedulers
from medusa.server.api.v2.base import (
BaseRequestHandler,
@@ -774,6 +775,10 @@ def make_quality(value, name, key=None):
)
]

section_data['clientStatuses'] = [
{'value': k.value, 'name': v} for k, v in status_strings.items()
]

# Save it for next time
cls._generated_data_consts = section_data

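This exposes the download handler's client statuses to the frontend config endpoint, so the history table can render and filter on readable status names. Assuming `status_strings` maps `ClientStatusEnum` members to display names (the real mapping lives in medusa/schedulers/download_handler.py), the generated entry has this shape:

# Illustrative only: hypothetical stand-ins for ClientStatusEnum/status_strings.
from enum import IntEnum

class ClientStatusEnum(IntEnum):
    SNATCHED = 1
    FAILED = 2

status_strings = {ClientStatusEnum.SNATCHED: 'Snatched', ClientStatusEnum.FAILED: 'Failed'}

client_statuses = [
    {'value': k.value, 'name': v} for k, v in status_strings.items()
]
# -> [{'value': 1, 'name': 'Snatched'}, {'value': 2, 'name': 'Failed'}]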
187 changes: 177 additions & 10 deletions medusa/server/api/v2/history.py
@@ -2,14 +2,16 @@
"""Request handler for alias (scene exceptions)."""
from __future__ import unicode_literals

import json
from os.path import basename

from medusa import db
from medusa.common import DOWNLOADED, FAILED, SNATCHED, SUBTITLED, statusStrings
from medusa.indexers.utils import indexer_id_to_name
from medusa.providers.generic_provider import GenericProvider
from medusa.schedulers.download_handler import ClientStatus
from medusa.server.api.v2.base import BaseRequestHandler
-from medusa.tv.series import SeriesIdentifier
from medusa.tv.series import Series, SeriesIdentifier


class HistoryHandler(BaseRequestHandler):
@@ -41,18 +43,99 @@ def get(self, series_slug, path_param):

arg_page = self._get_page()
arg_limit = self._get_limit(default=50)
compact_layout = bool(self.get_argument('compact', default=False))
return_last = bool(self.get_argument('last', default=False))
total_rows = self.get_argument('total', default=None)
sort = [json.loads(item) for item in self.get_arguments('sort[]')]
filter = json.loads(self.get_argument('filter')) if self.get_arguments('filter') else None

headers = {}

if return_last:
# Return the last history row
results = db.DBConnection().select('select * from history ORDER BY date DESC LIMIT 1')
if not results:
return self._not_found('History data not found')
return self._ok(data=results[0])

where = []

if series_slug is not None:
series_identifier = SeriesIdentifier.from_slug(series_slug)
if not series_identifier:
return self._bad_request('Invalid series')

-sql_base += ' WHERE indexer_id = ? AND showid = ?'
where += ['indexer_id', 'showid']
params += [series_identifier.indexer.id, series_identifier.id]

-sql_base += ' ORDER BY date DESC'
field_map = {
'actiondate': 'date',
'date': 'date',
'action': 'action',
'statusname': 'action',
'provider.id': 'provider',
'clientstatus': 'client_status',
'size': 'size',
'quality': 'quality'
}

# Prepare an operator (> or <) and size, for the size query.
size_operator = None
size = None
provider = None

if filter is not None and filter.get('columnFilters'):
size = filter['columnFilters'].pop('size', None)
provider = filter['columnFilters'].pop('provider.id', None)

if size:
size_operator, size = size.split(' ')

for filter_field, filter_value in filter['columnFilters'].items():
# Loop through each column filter apply the mapping, and add to sql_base.
filter_field = field_map.get(filter_field.lower())
if not filter_field or not filter_value:
continue
where += [filter_field]
params += [filter_value]

if where:
sql_base += ' WHERE ' + ' AND '.join(f'{item} = ?' for item in where)

# Add size query (with operator). Size arrives in MB and is stored in bytes.
if size_operator and size:
sql_base += f' {"AND" if where else "WHERE"} size {size_operator} ?'
params.append(int(size) * 1024 * 1024)
where += ['size']

# Add provider with LIKE %provider%
if provider:
sql_base += f' {"AND" if where else "WHERE"} provider LIKE ?'
params.append(f'%{provider}%')

if sort is not None and len(sort) == 1: # Only support one sort column right now.
field = sort[0].get('field').lower()
order = sort[0].get('type')
if field_map.get(field):
sql_base += f' ORDER BY {field_map[field]} {order} '

if total_rows:
sql_base += ' LIMIT ?'
params += [total_rows]

results = db.DBConnection().select(sql_base, params)

if compact_layout:
from collections import OrderedDict
res = OrderedDict()

for item in results:
if item.get('showid') and item.get('season') and item.get('episode') and item.get('indexer_id'):
item['showslug'] = f"{indexer_id_to_name(item['indexer_id'])}{item['showid']}"
my_key = f"{item['showslug']}S{item['season']}E{item['episode']}"
res.setdefault(my_key, []).append(item)
results = res
headers['X-Pagination-Count'] = len(results)

def data_generator():
"""Read and paginate history records."""
start = arg_limit * (arg_page - 1)
@@ -65,6 +148,8 @@ def data_generator():
subtitle_language = None
show_slug = None
client_status = None
show_title = 'Missing Show'

if item['action'] in (SNATCHED, FAILED):
provider.update({
@@ -79,9 +164,7 @@

if item['action'] == SUBTITLED:
subtitle_language = item['resource']

-if item['action'] == SUBTITLED:
-subtitle_language = item['resource']
provider['name'] = item['provider']

if item['client_status'] is not None:
status = ClientStatus(status=item['client_status'])
@@ -91,7 +174,15 @@
}

if item['indexer_id'] and item['showid']:
-show_slug = SeriesIdentifier.from_id(item['indexer_id'], item['showid']).slug
identifier = SeriesIdentifier.from_id(item['indexer_id'], item['showid'])
show_slug = identifier.slug
show = Series.find_by_identifier(identifier)
if show:
show_title = show.title

item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
show_title, item['season'], item['episode']
)

yield {
'id': item['rowid'],
@@ -105,22 +196,98 @@
'properTags': item['proper_tags'],
'season': item['season'],
'episode': item['episode'],
'episodeTitle': item['episodeTitle'],
'manuallySearched': bool(item['manually_searched']),
'infoHash': item['info_hash'],
'provider': provider,
'releaseName': release_name,
'releaseGroup': release_group,
'fileName': file_name,
'subtitleLanguage': subtitle_language,
'showSlug': show_slug,
'showTitle': show_title,
'providerType': item['provider_type'],
'clientStatus': client_status,
'partOfBatch': bool(item['part_of_batch'])
}

if not results:
return self._not_found('History data not found')
def data_generator_compact():
"""
Read and paginate history records.
Results are provided grouped per showid+season+episode.
The results are flattened into a structure of [{'actionDate': .., 'showSlug':.., 'rows':Array(history_items)},]
"""
start = arg_limit * (arg_page - 1)

for compact_item in list(results.values())[start:start + arg_limit]:
return_item = {'rows': []}
for item in compact_item:
provider = {}
release_group = None
release_name = None
file_name = None
subtitle_language = None

if item['action'] in (SNATCHED, FAILED):
provider.update({
'id': GenericProvider.make_id(item['provider']),
'name': item['provider']
})
release_name = item['resource']

if item['action'] == DOWNLOADED:
release_group = item['provider']
file_name = item['resource']

if item['action'] == SUBTITLED:
subtitle_language = item['resource']
provider['name'] = item['provider']

item['showSlug'] = None
item['showTitle'] = 'Missing Show'
if item['indexer_id'] and item['showid']:
identifier = SeriesIdentifier.from_id(item['indexer_id'], item['showid'])
item['showSlug'] = identifier.slug
show = Series.find_by_identifier(identifier)
if show:
item['showTitle'] = show.title

return_item['actionDate'] = item['date']
return_item['showSlug'] = item['showslug']
return_item['episodeTitle'] = '{0} - s{1:02d}e{2:02d}'.format(
item['showTitle'], item['season'], item['episode']
)
return_item['quality'] = item['quality']

return_item['rows'].append({
'actionDate': item['date'],
'id': item['rowid'],
'series': item['showSlug'],
'status': item['action'],
'statusName': statusStrings.get(item['action']),
'quality': item['quality'],
'resource': basename(item['resource']),
'size': item['size'],
'properTags': item['proper_tags'],
'season': item['season'],
'episode': item['episode'],
'manuallySearched': bool(item['manually_searched']),
'infoHash': item['info_hash'],
'provider': provider,
'release_name': release_name,
'releaseGroup': release_group,
'fileName': file_name,
'subtitleLanguage': subtitle_language,
'showSlug': item['showslug'],
'showTitle': item['showTitle']
})
yield return_item

if compact_layout:
return self._paginate(data_generator=data_generator_compact, headers=headers)

-return self._paginate(data_generator=data_generator)
return self._paginate(data_generator=data_generator, headers=headers)

def delete(self, identifier, **kwargs):
"""Delete a history record."""
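To make the vue-good-table remote-mode contract concrete, here is a self-contained sketch of the filter/sort translation performed above. The SELECT base, the example field names, and `build_history_query` itself are illustrative, not part of the commit; only the mapping rules (size as "operator megabytes", provider via LIKE, everything else as equality) follow the handler:

# Hypothetical, simplified re-statement of the query building in get().
def build_history_query(filter_arg=None, sort_arg=None):
    sql = 'SELECT * FROM history'  # assumed base query
    params, where = [], []

    column_filters = dict((filter_arg or {}).get('columnFilters', {}))
    size = column_filters.pop('size', None)             # e.g. '> 100' (MB)
    provider = column_filters.pop('provider.id', None)  # matched with LIKE

    # Remaining column filters become equality tests (field_map whitelisting omitted).
    for field, value in column_filters.items():
        if value:
            where.append(field)
            params.append(value)
    if where:
        sql += ' WHERE ' + ' AND '.join(f'{f} = ?' for f in where)

    if size:
        operator, megabytes = size.split(' ')
        sql += f' {"AND" if where else "WHERE"} size {operator} ?'
        params.append(int(megabytes) * 1024 * 1024)  # MB -> bytes
        where.append('size')

    if provider:
        sql += f' {"AND" if where else "WHERE"} provider LIKE ?'
        params.append(f'%{provider}%')

    if sort_arg:
        sql += f" ORDER BY {sort_arg['field']} {sort_arg['type']}"
    return sql, params

# build_history_query({'columnFilters': {'size': '> 100', 'provider.id': 'xyz'}},
#                     {'field': 'size', 'type': 'desc'})
# -> ('SELECT * FROM history WHERE size > ? AND provider LIKE ? ORDER BY size desc',
#     [104857600, '%xyz%'])

In compact mode the records are grouped per show/season/episode before pagination, and each paginated item carries its grouped rows. A single compact item then looks roughly like this (values invented, rows abridged):

# Hypothetical compact-mode item.
compact_item = {
    'actionDate': 20210321120000,
    'showSlug': 'tvdb301824',
    'episodeTitle': 'Some Show - s02e05',
    'quality': 8,
    'rows': [
        {'status': 2, 'statusName': 'Snatched', 'provider': {'id': 'some_provider', 'name': 'Some Provider'}},
        {'status': 4, 'statusName': 'Downloaded', 'releaseGroup': 'GROUP'}
    ]
}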
27 changes: 8 additions & 19 deletions medusa/server/web/core/history.py
@@ -2,8 +2,7 @@

from __future__ import unicode_literals

-from medusa import app, ui
-from medusa.helper.common import try_int
from medusa import ui
from medusa.server.web.core.base import PageTemplate, WebRoot
from medusa.show.history import History as HistoryTool

@@ -17,24 +16,14 @@ def __init__(self, *args, **kwargs):

self.history = HistoryTool()

-def index(self, limit=None):
-if limit is None:
-if app.HISTORY_LIMIT:
-limit = int(app.HISTORY_LIMIT)
-else:
-limit = 100
-else:
-limit = try_int(limit, 100)
-
-app.HISTORY_LIMIT = limit
-
-app.instance.save_config()
-
-history = self.history.get(limit)
-t = PageTemplate(rh=self, filename='history.mako')
-return t.render(historyResults=history.detailed, compactResults=history.compact, limit=limit,
-controller='history', action='index')
def index(self):
"""
Render the history page.

[Converted to VueRouter]
"""
t = PageTemplate(rh=self, filename='index.mako')
return t.render()

def clearHistory(self):
# @TODO: Replace this with DELETE /api/v2/history
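After this change the server route only serves the generic SPA shell; the old `limit` handling and `app.HISTORY_LIMIT` persistence move to the client and `/api/v2/history`. Every page converted to VueRouter ends up with the same handler shape, along these lines (hypothetical route name; `PageTemplate` as imported above):

# Hypothetical example of the recurring 'render the SPA shell' pattern:
# a converted route returns index.mako and VueRouter takes over client-side.
def some_converted_page(self):
    """Render a Vue-routed page. [Converted to VueRouter]"""
    t = PageTemplate(rh=self, filename='index.mako')
    return t.render()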
(Diff for the remaining 32 changed files not shown.)
