Merge pull request #1324 from MoojMidge/v7.3

v7.3.0+beta.8
Authored by MoojMidge on 2025-10-26 19:14:48 +11:00 (committed by GitHub)
commit d5e9dfa56f
23 changed files with 306 additions and 161 deletions


@ -1,5 +1,5 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<addon id="plugin.video.youtube" name="YouTube" version="7.3.0+beta.7" provider-name="anxdpanic, bromix, MoojMidge">
<addon id="plugin.video.youtube" name="YouTube" version="7.3.0+beta.8" provider-name="anxdpanic, bromix, MoojMidge">
<requires>
<import addon="xbmc.python" version="3.0.0"/>
<import addon="script.module.requests" version="2.27.1"/>


@ -1,3 +1,16 @@
## v7.3.0+beta.8
### Fixed
- Fix regression in handling of the audio-only setting after d154325c5b672dccc6a17413063cfdeb32256ffd
- Fix comments not using the correct sort methods
- Fix incorrect use of playlist cache entries that have been invalidated by playlist modification
- Fix some context menu actions failing for video item bookmarks
- Ensure listings and items added by the addon have the correct sort order
- Fix client region being reset when playing media with subtitles enabled
### Changed
- Improve offline access to cached data
- Update SQLite database lock handling
## v7.3.0+beta.7
### Fixed
- Only add playable items to playlist when adding related items


@ -203,6 +203,7 @@ class XbmcContext(AbstractContext):
'httpd.connect.wait': 13028,
'httpd.connect.failed': 1001,
'inputstreamhelper.is_installed': 30625,
'internet.connection.required': 21451,
'isa.enable.check': 30579,
'key.requirement': 30731,
'liked.video': 30716,
@ -710,7 +711,6 @@ class XbmcContext(AbstractContext):
xbmcplugin.setPluginCategory(self._plugin_handle, category_label)
detailed_labels = self.get_settings().show_detailed_labels()
if content_type == CONTENT.VIDEO_CONTENT:
if sub_type == CONTENT.HISTORY:
self.add_sort_method(
SORT.HISTORY_CONTENT_DETAILED
@ -729,7 +729,7 @@ class XbmcContext(AbstractContext):
if detailed_labels else
SORT.PLAYLIST_CONTENT_SIMPLE
)
else:
elif content_type == CONTENT.VIDEO_CONTENT:
self.add_sort_method(
SORT.VIDEO_CONTENT_DETAILED
if detailed_labels else


@ -28,7 +28,7 @@ class BaseItem(object):
_version = 3
_playable = False
def __init__(self, name, uri, image=None, fanart=None, **kwargs):
def __init__(self, name, uri, image=None, fanart=None, **_kwargs):
super(BaseItem, self).__init__()
self._name = None
self.set_name(name)


@ -71,6 +71,7 @@ class NextPageItem(DirectoryItem):
image=image,
fanart=fanart,
category_label='__inherit__',
special_sort='bottom',
)
self.next_page = page


@ -95,7 +95,7 @@ class NewSearchItem(DirectoryItem):
channel_id='',
addon_id='',
location=False,
**_kwargs):
**kwargs):
if not name:
name = context.get_ui().bold(
title or context.localize('search.new')
@ -120,7 +120,8 @@ class NewSearchItem(DirectoryItem):
params=params,
),
image=image,
fanart=fanart)
fanart=fanart,
**kwargs)
if context.is_plugin_path(context.get_uri(), ((PATHS.SEARCH, 'list'),)):
context_menu = [


@ -562,32 +562,39 @@ def directory_listitem(context, directory_item, show_fanart=None, **_kwargs):
if directory_item.next_page:
props['specialSort'] = 'bottom'
else:
special_sort = directory_item.get_special_sort()
if special_sort is None:
_special_sort = directory_item.get_special_sort()
if _special_sort is None:
special_sort = 'top'
elif special_sort is False:
elif _special_sort is False:
special_sort = None
else:
special_sort = _special_sort
prop_value = directory_item.subscription_id
if prop_value:
special_sort = None
special_sort = _special_sort
props[SUBSCRIPTION_ID] = prop_value
prop_value = directory_item.channel_id
if prop_value:
special_sort = None
special_sort = _special_sort
props[CHANNEL_ID] = prop_value
prop_value = directory_item.playlist_id
if prop_value:
special_sort = None
special_sort = _special_sort
props[PLAYLIST_ID] = prop_value
prop_value = directory_item.bookmark_id
if prop_value:
special_sort = None
special_sort = _special_sort
props[BOOKMARK_ID] = prop_value
prop_value = is_action and getattr(directory_item, VIDEO_ID, None)
if prop_value:
special_sort = _special_sort
props[VIDEO_ID] = prop_value
if special_sort:
props['specialSort'] = special_sort
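The net effect of this rework is that an explicit special_sort value set on the item now always wins, while the implicit "pin to top" fallback only applies to plain directory items; next-page items are still pinned to the bottom as before. A minimal sketch of the resolution order (hypothetical helper, not part of the addon):

```python
def resolve_special_sort(explicit, has_identifying_props):
    """Hedged illustration of the specialSort fallback logic above."""
    if explicit is False:
        # special_sort=False explicitly disables the property
        return None
    if explicit is not None:
        # an explicit 'top' / 'bottom' set on the item always wins
        return explicit
    # no explicit value: only plain directory items (no subscription,
    # channel, playlist, bookmark or video id) default to the top
    return None if has_identifying_props else 'top'
```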


@ -337,7 +337,13 @@ class KodiLogger(logging.Logger):
msg = MessageFormatter(msg, *args[1:-1], **kwargs)
args = ()
stack_info = stack_info and (exc_info or self.stack_info)
if stack_info:
if exc_info or self.stack_info:
pass
elif stack_info == 'forced':
stack_info = True
else:
stack_info = False
sinfo = None
if _srcfiles:
try:


@ -402,7 +402,7 @@ class BaseRequestsClass(object):
stacklevel=stacklevel)
cache.set(request_id)
response = cached_response
else:
elif response is not None:
self.log.debug(('Saving response to cache',
'Request ID: {request_id}',
'Etag: {etag}',


@ -69,6 +69,7 @@ def run(context=_context,
old_path = context.get_path().rstrip('/')
old_uri = ui.get_container_info(FOLDER_URI, container_id=None)
old_handle = context.get_handle()
context.init()
current_path = context.get_path().rstrip('/')
current_params = context.get_original_params()
@ -80,7 +81,7 @@ def run(context=_context,
params = context.get_params()
refresh = context.refresh_requested(params=params)
was_playing = old_path == PATHS.PLAY
is_same_path = current_path == old_path
is_same_path = current_path == old_path and old_handle != -1
if was_playing or is_same_path or refresh:
old_path, old_params = context.parse_uri(


@ -75,6 +75,7 @@ class DataCache(Storage):
def set_items(self, items):
self._set_many(items)
self._optimize_file_size()
def del_item(self, content_id):
self._remove(content_id)


@ -37,6 +37,7 @@ class RequestCache(Storage):
self._update(request_id, item, timestamp)
else:
self._set(request_id, item)
self._optimize_file_size()
else:
self._refresh(request_id, timestamp)


@ -24,15 +24,32 @@ from ..utils.file_system import make_dirs
class StorageLock(object):
def __init__(self):
self._lock = RLock()
self._num_accessing = 0
self._num_waiting = 0
def __enter__(self):
self._num_waiting += 1
self._lock.acquire()
locked = not self._lock.acquire(timeout=3)
self._num_waiting -= 1
return locked
def __exit__(self, exc_type, exc_val, exc_tb):
try:
self._lock.release()
except RuntimeError:
pass
def accessing(self, start=False, done=False):
if start:
self._num_accessing += 1
elif done:
self._num_accessing -= 1
num = self._num_accessing
if num > 0:
return True
if num < 0:
self._num_accessing = 0
return False
def waiting(self):
return self._num_waiting > 0
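This changes the lock's context-manager contract: entering it now waits at most a few seconds and yields a flag instead of blocking indefinitely, while the accessing/waiting counters let the storage layer decide when the database can safely be closed. A small standalone sketch of the timeout pattern (hypothetical TimeoutLock, mirroring how the storage methods further down consume the flag):

```python
from threading import RLock

class TimeoutLock(object):
    """Minimal illustration of the 'locked' flag returned by __enter__."""

    def __init__(self, timeout=3):
        self._lock = RLock()
        self._timeout = timeout

    def __enter__(self):
        # True means the lock could NOT be acquired within the timeout
        return not self._lock.acquire(timeout=self._timeout)

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            self._lock.release()
        except RuntimeError:
            # release() raises if acquisition timed out and was never held
            pass

lock = TimeoutLock()
with lock as locked:
    if locked:
        print('Database busy - skipping write')
    else:
        print('Lock held - safe to write')
```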
@ -206,7 +223,6 @@ class Storage(object):
self.uuid = filepath[1]
self._filepath = os.path.join(*filepath)
self._db = None
self._cursor = None
self._lock = StorageLock()
self._close_timer = None
self._max_item_count = -1 if migrate else max_item_count
@ -253,21 +269,23 @@ class Storage(object):
if close_timer:
close_timer.cancel()
self._close_timer = None
if self._db and self._cursor:
return self._db, self._cursor
return self._open()
self._lock.accessing(start=True)
db = self._db or self._open()
cursor = db.cursor()
cursor.arraysize = 100
return db, cursor
def __exit__(self, exc_type=None, exc_val=None, exc_tb=None):
close_timer = self._close_timer
if close_timer:
close_timer.cancel()
if self._lock.waiting():
self._close_timer = None
return
if not self._lock.accessing(done=True) and not self._lock.waiting():
close_timer = Timer(5, self._close)
close_timer.daemon = True
close_timer.start()
self._close_timer = close_timer
else:
self._close_timer = None
def _open(self):
statements = []
@ -281,24 +299,23 @@ class Storage(object):
for attempt in range(1, 4):
try:
db = sqlite3.connect(self._filepath,
# cached_statements=0,
check_same_thread=False,
isolation_level=None)
break
except (sqlite3.Error, sqlite3.OperationalError) as exc:
if attempt < 3 and isinstance(exc, sqlite3.OperationalError):
self.log.warning('Retry, attempt %d of 3',
self.log.warning('Attempt %d of 3',
attempt,
exc_info=True)
time.sleep(0.1)
else:
self.log.exception('Failed')
return None, None
return None
else:
return None, None
return None
cursor = db.cursor()
cursor.arraysize = 100
sql_script = [
'PRAGMA busy_timeout = 1000;',
@ -335,21 +352,19 @@ class Storage(object):
self._base._table_updated = True
self._db = db
self._cursor = cursor
return db, cursor
return db
def _close(self):
cursor = self._cursor
if cursor:
self._execute(cursor, 'PRAGMA optimize')
cursor.close()
self._cursor = None
def _close(self, commit=False):
db = self._db
if db:
if not db or self._lock.accessing() or self._lock.waiting():
return False
self._execute(db.cursor(), 'PRAGMA optimize')
# Not needed if using db as a context manager
# db.commit()
if commit:
db.commit()
db.close()
self._db = None
return True
def _execute(self, cursor, query, values=None, many=False, script=False):
if not cursor:
@ -370,14 +385,19 @@ class Storage(object):
return cursor.executescript(query)
return cursor.execute(query, values)
except (sqlite3.Error, sqlite3.OperationalError) as exc:
if attempt < 3 and isinstance(exc, sqlite3.OperationalError):
self.log.warning('Retry, attempt %d of 3',
attempt,
exc_info=True)
if attempt < 3:
if isinstance(exc, sqlite3.OperationalError):
time.sleep(0.1)
elif isinstance(exc, sqlite3.InterfaceError):
cursor = self._db.cursor()
else:
self.log.exception('Failed')
break
self.log.warning('Attempt %d of 3',
attempt,
exc_info=True)
else:
self.log.exception('Failed')
return []
return []
def _optimize_file_size(self, defer=False):
@ -385,11 +405,12 @@ class Storage(object):
if self._max_file_size_kb <= 0:
return False
with self._lock, self as (db, cursor), db:
with self as (db, cursor):
result = self._execute(cursor, self._sql['get_total_data_size'])
if result:
size_kb = result.fetchone()[0] // 1024
result = result.fetchone() if result else None
result = result[0] if result else None
if result is not None:
size_kb = result // 1024
else:
try:
size_kb = (os.path.getsize(self._filepath) // 1024)
@ -403,7 +424,9 @@ class Storage(object):
query = self._sql['prune_by_size'].format(prune_size)
if defer:
return query
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, query)
self._execute(cursor, 'VACUUM')
return True
@ -424,7 +447,9 @@ class Storage(object):
)
if defer:
return query
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, query)
self._execute(cursor, 'VACUUM')
return True
@ -432,11 +457,14 @@ class Storage(object):
def _set(self, item_id, item, timestamp=None):
values = self._encode(item_id, item, timestamp)
optimize_query = self._optimize_item_count(1, defer=True)
with self._lock, self as (db, cursor), db:
self._execute(cursor, 'BEGIN')
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
if optimize_query:
self._execute(cursor, 'BEGIN')
self._execute(cursor, optimize_query)
self._execute(cursor, self._sql['set'], values=values)
return True
def _set_many(self, items, flatten=False):
now = since_epoch()
@ -455,35 +483,44 @@ class Storage(object):
query = self._sql['set']
optimize_query = self._optimize_item_count(num_items, defer=True)
with self._lock, self as (db, cursor), db:
self._execute(cursor, 'BEGIN')
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
if optimize_query:
self._execute(cursor, 'BEGIN')
self._execute(cursor, optimize_query)
self._execute(cursor, query, many=(not flatten), values=values)
self._execute(cursor, 'COMMIT')
self._optimize_file_size()
return True
def _refresh(self, item_id, timestamp=None):
values = (timestamp or since_epoch(), to_str(item_id))
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, self._sql['refresh'], values=values)
return True
def _update(self, item_id, item, timestamp=None):
values = self._encode(item_id, item, timestamp, for_update=True)
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, self._sql['update'], values=values)
return True
def clear(self, defer=False):
query = self._sql['clear']
if defer:
return query
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, query)
self._execute(cursor, 'VACUUM')
return True
def is_empty(self):
with self as (db, cursor), db:
with self as (db, cursor):
result = self._execute(cursor, self._sql['is_empty'])
for item in result:
is_empty = item[0] == 0
@ -520,10 +557,10 @@ class Storage(object):
seconds=None,
as_dict=False,
with_timestamp=False):
with self._lock, self as (db, cursor), db:
result = self._execute(cursor, self._sql['get'], [to_str(item_id)])
with self as (db, cursor):
result = self._execute(cursor, self._sql['get'], (to_str(item_id),))
item = result.fetchone() if result else None
if not item:
if not item or not all(item):
return None
cut_off = since_epoch() - seconds if seconds else 0
if not cut_off or item[1] >= cut_off:
@ -569,9 +606,11 @@ class Storage(object):
epoch = since_epoch()
cut_off = epoch - seconds if seconds else 0
with self._lock, self as (db, cursor), db:
with self as (db, cursor):
result = self._execute(cursor, query, item_ids)
if as_dict:
if not result:
pass
elif as_dict:
if values_only:
result = {
item[0]: self._decode(item[2], process, item)
@ -600,12 +639,18 @@ class Storage(object):
return result
def _remove(self, item_id):
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, self._sql['remove'], [item_id])
return True
def _remove_many(self, item_ids):
num_ids = len(item_ids)
query = self._sql['remove_by_key'].format('?,' * (num_ids - 1) + '?')
with self._lock, self as (db, cursor), db:
with self._lock as locked, self as (db, cursor), db:
if locked:
return False
self._execute(cursor, query, tuple(item_ids))
self._execute(cursor, 'VACUUM')
return True


@ -1239,7 +1239,7 @@ class YouTubeDataClient(YouTubeLoginClient):
max_results = self.max_results()
params = {
'part': 'snippet,contentDetails,brandingSettings,statistics',
'maxResults': str(max_results),
'maxResults': max_results,
}
if channel_id == 'mine':


@ -984,13 +984,16 @@ class YouTubeRequestClient(BaseRequestsClass):
return client
def internet_available(self):
def internet_available(self, notify=True):
response = self.request(**self.CLIENTS['generate_204'])
if response is None:
return False
if response is not None:
with response:
if response.status_code == 204:
return True
if notify:
self._context.get_ui().show_notification(
self._context.localize('internet.connection.required')
)
return False
@classmethod


@ -80,7 +80,12 @@ class Subtitles(YouTubeRequestClient):
}
def __init__(self, context, video_id, use_mpd=None):
super(Subtitles, self).__init__(context=context)
settings = context.get_settings()
super(Subtitles, self).__init__(
context=context,
language=settings.get_language(),
region=settings.get_region(),
)
self.video_id = video_id
@ -90,7 +95,6 @@ class Subtitles(YouTubeRequestClient):
self.caption_tracks = None
self.translation_langs = None
settings = context.get_settings()
self.pre_download = settings.subtitle_download()
self.sub_selection = settings.get_subtitle_selection()
stream_features = settings.stream_features()


@ -102,11 +102,15 @@ class ResourceManager(object):
None if forced_cache else data_cache.ONE_DAY,
memory_store=self.new_data,
)
to_update = [id_ for id_ in ids
to_update = (
[]
if forced_cache else
[id_ for id_ in ids
if id_
and (id_ not in result
or not result[id_]
or result[id_].get('_partial'))]
)
if result:
self.log.debugging and self.log.debug(
@ -192,11 +196,15 @@ class ResourceManager(object):
None if forced_cache else data_cache.ONE_MONTH,
memory_store=self.new_data,
))
to_update = [id_ for id_ in ids
to_update = (
[]
if forced_cache else
[id_ for id_ in ids
if id_
and (id_ not in result
or not result[id_]
or result[id_].get('_partial'))]
)
if result:
self.log.debugging and self.log.debug(
@ -299,11 +307,15 @@ class ResourceManager(object):
None if forced_cache else data_cache.ONE_DAY,
memory_store=self.new_data,
)
to_update = [id_ for id_ in ids
to_update = (
[]
if forced_cache else
[id_ for id_ in ids
if id_
and (id_ not in result
or not result[id_]
or result[id_].get('_partial'))]
)
if result:
self.log.debugging and self.log.debug(
@ -410,11 +422,15 @@ class ResourceManager(object):
as_dict=True,
)
if not batch:
if not forced_cache:
to_update.append(batch_id)
break
age = batch.get('age')
batch = batch.get('value')
if forced_cache:
if not batch:
to_update.append(batch_id)
break
elif forced_cache:
result[batch_id] = batch
elif page_token:
if age <= data_cache.ONE_DAY:
@ -564,7 +580,10 @@ class ResourceManager(object):
None if forced_cache else data_cache.ONE_MONTH,
memory_store=self.new_data,
)
to_update = [id_ for id_ in ids
to_update = (
[]
if forced_cache else
[id_ for id_ in ids
if id_
and (id_ not in result
or not result[id_]
@ -572,6 +591,7 @@ class ResourceManager(object):
or (yt_items_dict
and yt_items_dict.get(id_)
and result[id_].get('_unavailable')))]
)
if result:
self.log.debugging and self.log.debug(


@ -149,6 +149,7 @@ def make_comment_item(context, snippet, uri, reply_count=0):
category_label=' - '.join(
(author, context.format_date_short(local_datetime))
),
special_sort=False,
)
else:
comment_item = CommandItem(
@ -890,7 +891,9 @@ def update_video_items(provider, context, video_id_dict,
label_stats = []
stats = []
rating = [0, 0]
rating = 0
likes = 0
views = 0
if 'statistics' in yt_item:
for stat, value in yt_item['statistics'].items():
label = context.LOCAL_MAP.get('stats.' + stat)
@ -912,21 +915,23 @@ def update_video_items(provider, context, video_id_dict,
)))))
if stat == 'likeCount':
rating[0] = value
likes = value
elif stat == 'viewCount':
rating[1] = value
media_item.set_count(value)
views = value
media_item.set_count(views)
label_stats = ' | '.join(label_stats)
stats = ' | '.join(stats)
if 0 < rating[0] <= rating[1]:
if rating[0] == rating[1]:
if 0 < likes <= views:
if likes == views:
rating = 10
else:
# This is a completely made up, arbitrary ranking score
rating = (10 * (log10(rating[1]) * log10(rating[0]))
/ (log10(rating[0] + rating[1]) ** 2))
rating = (
10 * (log10(views) * log10(likes))
/ (log10(likes + views) ** 2)
)
media_item.set_rating(rating)
# Used for label2, but is poorly supported in skins
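A quick worked example of the renamed rating calculation, using made-up counts (the score itself is, as the comment above says, arbitrary):

```python
from math import log10

likes, views = 1_500, 120_000  # sample values, not real data
if 0 < likes <= views:
    if likes == views:
        rating = 10
    else:
        rating = (
            10 * (log10(views) * log10(likes))
            / (log10(likes + views) ** 2)
        )
    print(round(rating, 2))  # ~6.24 for these sample counts
```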


@ -86,6 +86,7 @@ def _process_list_response(provider,
channel_items_dict = {}
items = []
position = 0
do_callbacks = False
params = context.get_params()
@ -226,8 +227,8 @@ def _process_list_response(provider,
image=image,
fanart=fanart,
plot=description,
video_id=video_id,
channel_id=channel_id)
channel_id=channel_id,
**item_params)
elif kind_type == 'channel':
channel_id = item_id
@ -241,7 +242,8 @@ def _process_list_response(provider,
fanart=fanart,
plot=description,
category_label=title,
channel_id=channel_id)
channel_id=channel_id,
**item_params)
elif kind_type == 'guidecategory':
item_params['guide_id'] = item_id
@ -254,7 +256,8 @@ def _process_list_response(provider,
image=image,
fanart=fanart,
plot=description,
category_label=title)
category_label=title,
**item_params)
elif kind_type == 'subscription':
subscription_id = item_id
@ -272,7 +275,8 @@ def _process_list_response(provider,
plot=description,
category_label=title,
channel_id=channel_id,
subscription_id=subscription_id)
subscription_id=subscription_id,
**item_params)
elif kind_type == 'searchfolder':
if item_filter and item_filter.get(HIDE_SEARCH):
@ -360,7 +364,8 @@ def _process_list_response(provider,
plot=description,
category_label=title,
channel_id=channel_id,
playlist_id=playlist_id)
playlist_id=playlist_id,
**item_params)
item.available = yt_item.get('_available', False)
elif kind_type == 'playlistitem':
@ -383,10 +388,10 @@ def _process_list_response(provider,
image=image,
fanart=fanart,
plot=description,
video_id=video_id,
channel_id=channel_id,
playlist_id=playlist_id,
playlist_item_id=playlist_item_id)
playlist_item_id=playlist_item_id,
**item_params)
# date time
published_at = snippet.get('publishedAt')
@ -415,7 +420,7 @@ def _process_list_response(provider,
image=image,
fanart=fanart,
plot=description,
video_id=video_id)
**item_params)
elif kind_type.startswith('comment'):
if kind_type == 'commentthread':
@ -435,8 +440,6 @@ def _process_list_response(provider,
snippet,
uri=item_uri,
reply_count=reply_count)
position = snippet.get('position') or len(items)
item.set_track_number(position + 1)
elif kind_type == 'bookmarkitem':
item = BookmarkItem(**item_params)
@ -514,14 +517,11 @@ def _process_list_response(provider,
item.callback = yt_item.pop('_callback')
do_callbacks = True
if isinstance(item, MediaItem):
# Set track number from playlist, or set to current list length to
if not item.get_special_sort():
# Set track number from playlist, or set to current list position to
# match "Default" (unsorted) sort order
if kind_type == 'playlistitem':
position = snippet.get('position') or len(items)
else:
position = len(items)
item.set_track_number(position + 1)
item.set_track_number(snippet.get('position', position) + 1)
position += 1
items.append(item)


@ -56,6 +56,7 @@ def _do_login(provider, context, client=None, **kwargs):
access_manager = context.get_access_manager()
addon_id = context.get_param('addon_id', None)
localize = context.localize
function_cache = context.get_function_cache()
ui = context.get_ui()
ui.on_ok(localize('sign.multi.title'), localize('sign.multi.text'))
@ -83,6 +84,13 @@ def _do_login(provider, context, client=None, **kwargs):
except IndexError:
pass
if not function_cache.run(
client.internet_available,
function_cache.ONE_MINUTE * 5,
_refresh=True,
):
break
new_token = ('', expiry_timestamp, '')
try:
json_data = client.request_device_and_user_code(token_idx)


@ -74,10 +74,9 @@ def _play_stream(provider, context):
ask_for_quality = settings.ask_for_video_quality()
if ui.pop_property(PLAY_PROMPT_QUALITY) and not screensaver:
ask_for_quality = True
audio_only = not ask_for_quality and settings.audio_only()
if ui.pop_property(PLAY_FORCE_AUDIO):
audio_only = True
else:
audio_only = settings.audio_only()
use_mpd = ((not is_external or settings.alternative_player_mpd())
and settings.use_mpd_videos()
and context.ipc_exec(SERVER_WAKEUP, timeout=5))
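The corrected precedence is easiest to read as a standalone decision: a forced audio-only request always wins, and otherwise the quality prompt suppresses the audio-only preference. A hedged sketch (hypothetical helper, not the addon's code):

```python
def resolve_audio_only(force_audio, ask_for_quality, audio_only_setting):
    if force_audio:
        # explicit "play audio only" request always wins
        return True
    # the quality prompt takes precedence over the audio-only setting
    return not ask_for_quality and audio_only_setting
```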


@ -544,6 +544,7 @@ def _process_my_subscriptions(provider,
'name': context.localize('my_subscriptions'),
'uri': context.create_uri(my_subscriptions_path),
'image': '{media}/new_uploads.png',
'special_sort': 'top',
},
},
None
@ -556,6 +557,7 @@ def _process_my_subscriptions(provider,
(my_subscriptions_path, 'shorts')
),
'image': '{media}/shorts.png',
'special_sort': 'top',
},
},
None
@ -568,6 +570,7 @@ def _process_my_subscriptions(provider,
(my_subscriptions_path, 'live')
),
'image': '{media}/live.png',
'special_sort': 'top',
},
},
],


@ -135,7 +135,7 @@ class Provider(AbstractProvider):
)
self._client.reinit(**kwargs)
def get_client(self, context):
def get_client(self, context, refresh=False):
access_manager = context.get_access_manager()
api_store = context.get_api_store()
settings = context.get_settings()
@ -262,8 +262,15 @@ class Provider(AbstractProvider):
access_manager.update_access_token(dev_id, access_token='')
return client
with client:
# create new access tokens
with client:
function_cache = context.get_function_cache()
if not function_cache.run(
client.internet_available,
function_cache.ONE_MINUTE * 5,
_refresh=refresh or context.refresh_requested(),
):
num_refresh_tokens = 0
if num_refresh_tokens and num_access_tokens != num_refresh_tokens:
access_tokens = [None, None, None, None]
token_expiry = 0
@ -400,6 +407,9 @@ class Provider(AbstractProvider):
},
'_available': True,
'_partial': True,
'_params': {
'special_sort': 'top',
},
},
{
'kind': 'youtube#playlistShortsFolder',
@ -412,6 +422,9 @@ class Provider(AbstractProvider):
}},
},
'_partial': True,
'_params': {
'special_sort': 'top',
},
} if not params.get(HIDE_SHORTS) else None,
{
'kind': 'youtube#playlistLiveFolder',
@ -424,6 +437,9 @@ class Provider(AbstractProvider):
}},
},
'_partial': True,
'_params': {
'special_sort': 'top',
},
} if not params.get(HIDE_LIVE) else None,
]
else:
@ -760,6 +776,7 @@ class Provider(AbstractProvider):
'title': context.localize('playlists'),
'image': '{media}/playlist.png',
CHANNEL_ID: channel_id,
'special_sort': 'top',
},
} if not params.get(HIDE_PLAYLISTS) else None,
{
@ -768,6 +785,7 @@ class Provider(AbstractProvider):
'title': context.localize('search'),
'image': '{media}/search.png',
CHANNEL_ID: channel_id,
'special_sort': 'top',
},
} if not params.get(HIDE_SEARCH) else None,
{
@ -781,6 +799,9 @@ class Provider(AbstractProvider):
}},
},
'_partial': True,
'_params': {
'special_sort': 'top',
},
} if uploads and not params.get(HIDE_SHORTS) else None,
{
'kind': 'youtube#playlistLiveFolder',
@ -793,6 +814,9 @@ class Provider(AbstractProvider):
}},
},
'_partial': True,
'_params': {
'special_sort': 'top',
},
} if uploads and not params.get(HIDE_LIVE) else None,
{
'kind': 'youtube#playlistMembersFolder',
@ -805,6 +829,9 @@ class Provider(AbstractProvider):
}},
},
'_partial': True,
'_params': {
'special_sort': 'top',
},
} if uploads and not params.get(HIDE_MEMBERS) else None,
],
}
@ -937,7 +964,7 @@ class Provider(AbstractProvider):
re_match.group('mode'),
provider,
context,
client=provider.get_client(context),
client=provider.get_client(context, refresh=True),
)
def _search_channel_or_playlist(self,