Mirror of https://github.com/anxdpanic/plugin.video.youtube.git
Synced 2025-12-05 18:20:41 -08:00

Compare commits: d5e9dfa56f ... ed3f53c60b (21 commits)
| Author | SHA1 | Date |
|---|---|---|
| | ed3f53c60b | |
| | 0db45cb923 | |
| | 13a4907262 | |
| | a9f0436deb | |
| | b585725a04 | |
| | 86008ba189 | |
| | 7472956964 | |
| | eee33347f3 | |
| | a710a77576 | |
| | 419c37ddd1 | |
| | 154c4db740 | |
| | 04ba84da63 | |
| | be8237a5e1 | |
| | 3d2f20eb4a | |
| | a7e2b3f8ca | |
| | 417e054b84 | |
| | 1e4d61fac2 | |
| | 37b898a0c4 | |
| | 36e2809028 | |
| | 0bea79e9f4 | |
| | ddc7b4900b | |
23 changed files with 582 additions and 307 deletions
```diff
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
-<addon id="plugin.video.youtube" name="YouTube" version="7.3.0+beta.8" provider-name="anxdpanic, bromix, MoojMidge">
+<addon id="plugin.video.youtube" name="YouTube" version="7.3.0+beta.9" provider-name="anxdpanic, bromix, MoojMidge">
     <requires>
         <import addon="xbmc.python" version="3.0.0"/>
         <import addon="script.module.requests" version="2.27.1"/>
```
```diff
@@ -1,3 +1,17 @@
+## v7.3.0+beta.9
+### Fixed
+- Disable label masks being used in Kodi 18 #1327
+- Python 2 compatibility workaround for lack of timeout when trying to acquire an RLock #1327
+- More expansive handling of inconsistent urllib3 exception re-raising
+
+### Changed
+- Improve robustness of fetching recommended and related videos
+- Improve workarounds for SQLite concurrency issues
+- Remove possibly invalid access token if an authentication error occurs
+- Better organise and use standard labels for http server address and port settings
+- Try to make http server IP address selection even more obvious when running Setup Wizard #1320
+- Improve logging of errors caused by localised strings that have been incorrectly translated
+
 ## v7.3.0+beta.8
 ### Fixed
 - Fix regression in handling audio only setting after d154325c5b672dccc6a17413063cfdeb32256ffd
```
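The RLock fix listed in the changelog shows up later in this diff as a version-gated `__enter__` on `StorageLock`. A minimal sketch of the same pattern, using `sys.version_info` as a stand-in for the add-on's `current_system_version.compatible(19)` check:

```python
import sys
from threading import RLock


class StorageLock(object):
    # Sketch only: the add-on keys this off its Kodi version helper,
    # current_system_version.compatible(19); sys.version_info is used
    # here as a stand-in for the same Python 2 vs Python 3 split.
    def __init__(self):
        self._lock = RLock()

    if sys.version_info >= (3,):
        def __enter__(self):
            # Python 3: wait up to 3 seconds for the lock.
            # Returns True when the lock could NOT be acquired,
            # matching how the add-on's callers use the value.
            locked = not self._lock.acquire(timeout=3)
            return locked
    else:
        def __enter__(self):
            # Python 2: RLock.acquire() has no timeout parameter,
            # so fall back to a non-blocking attempt instead.
            locked = not self._lock.acquire(blocking=False)
            return locked

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            self._lock.release()
        except RuntimeError:
            pass
```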
```diff
@@ -878,7 +878,7 @@ msgid "Delete access_manager.json"
 msgstr ""

 msgctxt "#30643"
-msgid "Listen on IP"
+msgid ""
 msgstr ""

 msgctxt "#30644"
```
```diff
@@ -399,7 +399,14 @@ class AbstractContext(object):

         command = 'command://' if command else ''
         if run:
-            return ''.join((command, 'RunPlugin(', uri, ')'))
+            return ''.join((command,
+                            'RunAddon('
+                            if run == 'addon' else
+                            'RunScript('
+                            if run == 'script' else
+                            'RunPlugin(',
+                            uri,
+                            ')'))
         if play is not None:
             return ''.join((
                 command,
```
```diff
@@ -10,9 +10,9 @@

 from __future__ import absolute_import, division, unicode_literals

-import atexit
 import json
 import sys
+from atexit import register as atexit_register
 from timeit import default_timer
 from weakref import proxy

@@ -461,7 +461,7 @@ class XbmcContext(AbstractContext):
         self._ui = None
         self._playlist = None

-        atexit.register(self.tear_down)
+        atexit_register(self.tear_down)

     def init(self):
         num_args = len(sys.argv)
@@ -684,8 +684,9 @@ class XbmcContext(AbstractContext):
             return result % _args
         except TypeError:
             self.log.exception(('Localization error',
-                                'text_id: {text_id!r}',
-                                'args: {original_args!r}'),
+                                'String: {result!r} ({text_id!r})',
+                                'args: {original_args!r}'),
+                               result=result,
                                text_id=text_id,
                                original_args=args)
         return result
@@ -743,13 +744,19 @@ class XbmcContext(AbstractContext):
         )

     if current_system_version.compatible(19):
-        def add_sort_method(self, sort_methods):
+        def add_sort_method(self,
+                            sort_methods,
+                            _add_sort_method=xbmcplugin.addSortMethod):
+            handle = self._plugin_handle
             for sort_method in sort_methods:
-                xbmcplugin.addSortMethod(self._plugin_handle, *sort_method)
+                _add_sort_method(handle, *sort_method)
     else:
-        def add_sort_method(self, sort_methods):
+        def add_sort_method(self,
+                            sort_methods,
+                            _add_sort_method=xbmcplugin.addSortMethod):
+            handle = self._plugin_handle
             for sort_method in sort_methods:
-                xbmcplugin.addSortMethod(self._plugin_handle, *sort_method[:2])
+                _add_sort_method(handle, *sort_method[:3:2])

     def clone(self, new_path=None, new_params=None):
         if not new_path:
```
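A side note on the Kodi 18 branch above: the new call unpacks `sort_method[:3:2]`, i.e. elements 0 and 2 of the tuple, so whatever sits at index 1 is no longer passed, which lines up with the "Disable label masks being used in Kodi 18" changelog entry. A tiny illustration of the slice itself, with made-up tuple contents that are only for demonstration:

```python
# Assumption (not taken from the diff): each sort_method tuple looks like
# (sort_method_id, label_mask, label2_mask).
sort_method = ('SORT_METHOD_DATE', '%D', '%T')

print(sort_method[:2])    # ('SORT_METHOD_DATE', '%D')  -> old Kodi 18 call, includes index 1
print(sort_method[:3:2])  # ('SORT_METHOD_DATE', '%T')  -> new call, skips index 1 entirely
```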
```diff
@@ -181,8 +181,11 @@ class RequestHandler(BaseHTTPRequestHandler, object):
                 return
             except (HTTPError, OSError) as exc:
                 self.close_connection = True
-                if exc.errno not in self.SWALLOWED_ERRORS:
-                    raise exc
+                self.log.exception('Request failed')
+                if (isinstance(exc, HTTPError)
+                        or getattr(exc, 'errno', None) in self.SWALLOWED_ERRORS):
+                    return
+                raise exc

     def ip_address_status(self, ip_address):
         is_whitelisted = ip_address in self.whitelist_ips
@@ -413,7 +416,7 @@ class RequestHandler(BaseHTTPRequestHandler, object):
                 'list': priority_list,
             }
         elif original_path == '/api/timedtext':
-            stream_type = (params.get('type', empty)[0],
+            stream_type = (params.get('type', ['track'])[0],
                            params.get('fmt', empty)[0],
                            params.get('kind', empty)[0])
             priority_list = []
```
```diff
@@ -9,8 +9,8 @@

 from __future__ import absolute_import, division, unicode_literals

-import atexit
 import socket
+from atexit import register as atexit_register

 from requests import Request, Session
 from requests.adapters import HTTPAdapter, Retry
@@ -79,7 +79,7 @@ class BaseRequestsClass(object):
             allowed_methods=None,
         )
     ))
-    atexit.register(_session.close)
+    atexit_register(_session.close)

    _context = None
    _verify = True
@@ -390,27 +390,28 @@ class BaseRequestsClass(object):
                     raise raise_exc
                 raise exc

-        if cache:
-            if cached_response is not None:
-                self.log.debug(('Using cached response',
-                                'Request ID: {request_id}',
-                                'Etag: {etag}',
-                                'Modified: {timestamp}'),
-                               request_id=request_id,
-                               etag=etag,
-                               timestamp=timestamp,
-                               stacklevel=stacklevel)
-                cache.set(request_id)
-                response = cached_response
-            elif response is not None:
-                self.log.debug(('Saving response to cache',
-                                'Request ID: {request_id}',
-                                'Etag: {etag}',
-                                'Modified: {timestamp}'),
-                               request_id=request_id,
-                               etag=etag,
-                               timestamp=timestamp,
-                               stacklevel=stacklevel)
-                cache.set(request_id, response, etag)
+        if not cache:
+            pass
+        elif cached_response is not None:
+            self.log.debug(('Using cached response',
+                            'Request ID: {request_id}',
+                            'Etag: {etag}',
+                            'Modified: {timestamp}'),
+                           request_id=request_id,
+                           etag=etag,
+                           timestamp=timestamp,
+                           stacklevel=stacklevel)
+            cache.set(request_id)
+            response = cached_response
+        elif response is not None:
+            self.log.debug(('Saving response to cache',
+                            'Request ID: {request_id}',
+                            'Etag: {etag}',
+                            'Modified: {timestamp}'),
+                           request_id=request_id,
+                           etag=etag,
+                           timestamp=timestamp,
+                           stacklevel=stacklevel)
+            cache.set(request_id, response, etag)

         return response
```
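The reworked branch above either reuses the cached body (refreshing only its cache entry via `cache.set(request_id)`) or stores the fresh response together with its ETag. For orientation, a generic sketch of that conditional-request pattern with `requests`; the `cache.get`, `cache.touch` and `cache.put` helpers here are hypothetical and are not the add-on's RequestCache API:

```python
import requests


def fetch(url, cache):
    # Hypothetical cache object: get() returns (etag, body) or None.
    cached = cache.get(url)
    headers = {'If-None-Match': cached[0]} if cached else {}
    response = requests.get(url, headers=headers, timeout=10)
    if response.status_code == 304 and cached:
        # Server confirmed the cached body is still valid: reuse it and
        # only refresh the cache entry's timestamp.
        cache.touch(url)
        return cached[1]
    # Otherwise store the new body together with its ETag for next time.
    cache.put(url, response.headers.get('ETag'), response)
    return response
```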
```diff
@@ -464,7 +464,8 @@ class AbstractSettings(object):
         ip_address = '.'.join(map(str, octets))

         if value is not None:
-            return self.set_string(SETTINGS.HTTPD_LISTEN, ip_address)
+            if not self.set_string(SETTINGS.HTTPD_LISTEN, ip_address):
+                return False
         return ip_address

     def httpd_whitelist(self):
```
```diff
@@ -18,6 +18,8 @@ class DataCache(Storage):
     _table_updated = False
     _sql = {}

+    memory_store = {}
+
     def __init__(self, filepath, max_file_size_mb=5):
         max_file_size_kb = max_file_size_mb * 1024
         super(DataCache, self).__init__(filepath,
@@ -27,25 +29,11 @@
                   content_ids,
                   seconds=None,
                   as_dict=True,
-                  values_only=True,
-                  memory_store=None):
-        if memory_store:
-            in_memory_result = {}
-            _content_ids = []
-            for key in content_ids:
-                if key in memory_store:
-                    in_memory_result[key] = memory_store[key]
-                else:
-                    _content_ids.append(key)
-            content_ids = _content_ids
-        else:
-            in_memory_result = None
+                  values_only=True):
         result = self._get_by_ids(content_ids,
                                   seconds=seconds,
                                   as_dict=as_dict,
                                   values_only=values_only)
-        if in_memory_result:
-            result.update(in_memory_result)
         return result

     def get_items_like(self, content_id, seconds=None):
@@ -70,12 +58,11 @@
         result = self._get(content_id, seconds=seconds, as_dict=as_dict)
         return result

-    def set_item(self, content_id, item):
-        self._set(content_id, item)
+    def set_item(self, content_id, item, defer=False, flush=False):
+        self._set(content_id, item, defer=defer, flush=flush)

-    def set_items(self, items):
-        self._set_many(items)
-        self._optimize_file_size()
+    def set_items(self, items, defer=False, flush=False):
+        self._set_many(items, defer=defer, flush=flush)

     def del_item(self, content_id):
         self._remove(content_id)
```
```diff
@@ -22,6 +22,8 @@ class FunctionCache(Storage):
     _table_updated = False
     _sql = {}

+    memory_store = {}
+
     _BUILTIN = str.__module__
     SCOPE_NONE = 0
     SCOPE_BUILTINS = 1
@@ -134,7 +136,7 @@ class FunctionCache(Storage):
         if callable(process):
             data = process(data, _data)
         if data != ignore_value:
-            self._set(cache_id, data)
+            self._set(cache_id, data, defer=True)
         elif oneshot:
             self._remove(cache_id)

```
```diff
@@ -40,8 +40,8 @@ class PlaybackHistory(Storage):
         result = self._get(key, process=self._add_last_played)
         return result

-    def set_item(self, video_id, play_data, timestamp=None):
-        self._set(video_id, play_data, timestamp)
+    def set_item(self, video_id, play_data):
+        self._set(video_id, play_data)

     def del_item(self, video_id):
         self._remove(video_id)
```
```diff
@@ -18,6 +18,8 @@ class RequestCache(Storage):
     _table_updated = False
     _sql = {}

+    memory_store = {}
+
     def __init__(self, filepath, max_file_size_mb=20):
         max_file_size_kb = max_file_size_mb * 1024
         super(RequestCache, self).__init__(filepath,
@@ -36,8 +38,7 @@ class RequestCache(Storage):
             if timestamp:
                 self._update(request_id, item, timestamp)
             else:
-                self._set(request_id, item)
-                self._optimize_file_size()
+                self._set(request_id, item, defer=True)
         else:
             self._refresh(request_id, timestamp)

```
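All of the cache classes above gain a class-level `memory_store` and start calling `_set(..., defer=True)`; the `Storage` changes further down in this diff then flush the accumulated items in a single `BEGIN IMMEDIATE` transaction when the database is closed. A stripped-down sketch of that write-behind idea, with class and method names (`MiniStore`, `flush`) that are illustrative and not the add-on's:

```python
import sqlite3


class MiniStore(object):
    memory_store = {}

    def __init__(self, filepath):
        self._db = sqlite3.connect(filepath, isolation_level=None)
        self._db.execute(
            'CREATE TABLE IF NOT EXISTS kv (key TEXT PRIMARY KEY, value TEXT)'
        )

    def set(self, key, value, defer=False):
        if defer:
            # Deferred writes only accumulate in memory...
            self.memory_store[key] = value
            return
        self._write({key: value})

    def flush(self):
        # ...and are written in one IMMEDIATE transaction later, e.g. when
        # the storage is closed, to limit SQLite lock contention.
        if self.memory_store:
            self._write(self.memory_store)
            self.memory_store.clear()

    def _write(self, items):
        cursor = self._db.cursor()
        cursor.execute('BEGIN IMMEDIATE')
        cursor.executemany(
            'INSERT OR REPLACE INTO kv (key, value) VALUES (?, ?)',
            list(items.items()),
        )
        cursor.execute('COMMIT')
```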
|
|||
|
|
@ -13,12 +13,14 @@ from __future__ import absolute_import, division, unicode_literals
|
|||
import os
|
||||
import sqlite3
|
||||
import time
|
||||
from atexit import register as atexit_register
|
||||
from threading import RLock, Timer
|
||||
|
||||
from .. import logging
|
||||
from ..compatibility import pickle, to_str
|
||||
from ..utils.datetime import fromtimestamp, since_epoch
|
||||
from ..utils.file_system import make_dirs
|
||||
from ..utils.system_version import current_system_version
|
||||
|
||||
|
||||
class StorageLock(object):
|
||||
|
|
@ -27,11 +29,18 @@ class StorageLock(object):
|
|||
self._num_accessing = 0
|
||||
self._num_waiting = 0
|
||||
|
||||
def __enter__(self):
|
||||
self._num_waiting += 1
|
||||
locked = not self._lock.acquire(timeout=3)
|
||||
self._num_waiting -= 1
|
||||
return locked
|
||||
if current_system_version.compatible(19):
|
||||
def __enter__(self):
|
||||
self._num_waiting += 1
|
||||
locked = not self._lock.acquire(timeout=3)
|
||||
self._num_waiting -= 1
|
||||
return locked
|
||||
else:
|
||||
def __enter__(self):
|
||||
self._num_waiting += 1
|
||||
locked = not self._lock.acquire(blocking=False)
|
||||
self._num_waiting -= 1
|
||||
return locked
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
try:
|
||||
|
|
@ -40,16 +49,13 @@ class StorageLock(object):
|
|||
pass
|
||||
|
||||
def accessing(self, start=False, done=False):
|
||||
if start:
|
||||
self._num_accessing += 1
|
||||
elif done:
|
||||
self._num_accessing -= 1
|
||||
num = self._num_accessing
|
||||
if num > 0:
|
||||
return True
|
||||
if num < 0:
|
||||
self._num_accessing = 0
|
||||
return False
|
||||
if start:
|
||||
num += 1
|
||||
elif done and num > 0:
|
||||
num -= 1
|
||||
self._num_accessing = num
|
||||
return num > 0
|
||||
|
||||
def waiting(self):
|
||||
return self._num_waiting > 0
|
||||
|
|
@ -225,8 +231,10 @@ class Storage(object):
|
|||
self._db = None
|
||||
self._lock = StorageLock()
|
||||
self._close_timer = None
|
||||
self._close_actions = False
|
||||
self._max_item_count = -1 if migrate else max_item_count
|
||||
self._max_file_size_kb = -1 if migrate else max_file_size_kb
|
||||
atexit_register(self._close, event='shutdown')
|
||||
|
||||
if migrate:
|
||||
self._base = self
|
||||
|
|
@ -264,14 +272,22 @@ class Storage(object):
|
|||
def set_max_file_size_kb(self, max_file_size_kb):
|
||||
self._max_file_size_kb = max_file_size_kb
|
||||
|
||||
def __del__(self):
|
||||
self._close(event='deleted')
|
||||
|
||||
def __enter__(self):
|
||||
self._lock.accessing(start=True)
|
||||
|
||||
close_timer = self._close_timer
|
||||
if close_timer:
|
||||
close_timer.cancel()
|
||||
self._close_timer = None
|
||||
self._lock.accessing(start=True)
|
||||
|
||||
db = self._db or self._open()
|
||||
cursor = db.cursor()
|
||||
try:
|
||||
cursor = db.cursor()
|
||||
except (AttributeError, sqlite3.ProgrammingError):
|
||||
db = self._open()
|
||||
cursor = db.cursor()
|
||||
cursor.arraysize = 100
|
||||
return db, cursor
|
||||
|
||||
|
|
@ -279,13 +295,16 @@ class Storage(object):
|
|||
close_timer = self._close_timer
|
||||
if close_timer:
|
||||
close_timer.cancel()
|
||||
if not self._lock.accessing(done=True) and not self._lock.waiting():
|
||||
|
||||
if self._lock.accessing(done=True) or self._lock.waiting():
|
||||
return
|
||||
|
||||
with self._lock as locked:
|
||||
if locked or self._close_timer:
|
||||
return
|
||||
close_timer = Timer(5, self._close)
|
||||
close_timer.daemon = True
|
||||
close_timer.start()
|
||||
self._close_timer = close_timer
|
||||
else:
|
||||
self._close_timer = None
|
||||
|
||||
def _open(self):
|
||||
statements = []
|
||||
|
|
@ -299,7 +318,7 @@ class Storage(object):
|
|||
for attempt in range(1, 4):
|
||||
try:
|
||||
db = sqlite3.connect(self._filepath,
|
||||
# cached_statements=0,
|
||||
cached_statements=0,
|
||||
check_same_thread=False,
|
||||
isolation_level=None)
|
||||
break
|
||||
|
|
@ -354,16 +373,38 @@ class Storage(object):
|
|||
self._db = db
|
||||
return db
|
||||
|
||||
def _close(self, commit=False):
|
||||
db = self._db
|
||||
if not db or self._lock.accessing() or self._lock.waiting():
|
||||
def _close(self, commit=False, event=None):
|
||||
close_timer = self._close_timer
|
||||
if close_timer:
|
||||
close_timer.cancel()
|
||||
|
||||
if self._lock.accessing() or self._lock.waiting():
|
||||
return False
|
||||
|
||||
db = self._db
|
||||
if not db and self._close_actions:
|
||||
db = self._open()
|
||||
else:
|
||||
return None
|
||||
|
||||
if self._close_actions:
|
||||
memory_store = getattr(self, 'memory_store', None)
|
||||
if memory_store:
|
||||
self._set_many(items=None, memory_store=memory_store)
|
||||
self._optimize_item_count()
|
||||
self._optimize_file_size()
|
||||
self._close_actions = False
|
||||
|
||||
self._execute(db.cursor(), 'PRAGMA optimize')
|
||||
|
||||
# Not needed if using db as a context manager
|
||||
if commit:
|
||||
db.commit()
|
||||
db.close()
|
||||
self._db = None
|
||||
|
||||
if event:
|
||||
db.close()
|
||||
self._db = None
|
||||
self._close_timer = None
|
||||
return True
|
||||
|
||||
def _execute(self, cursor, query, values=None, many=False, script=False):
|
||||
|
|
@ -393,9 +434,9 @@ class Storage(object):
|
|||
else:
|
||||
self.log.exception('Failed')
|
||||
break
|
||||
self.log.warning('Attempt %d of 3',
|
||||
attempt,
|
||||
exc_info=True)
|
||||
self.log.warning_trace('Attempt %d of 3',
|
||||
attempt,
|
||||
exc_info=True)
|
||||
else:
|
||||
self.log.exception('Failed')
|
||||
return []
|
||||
|
|
@ -424,12 +465,18 @@ class Storage(object):
|
|||
query = self._sql['prune_by_size'].format(prune_size)
|
||||
if defer:
|
||||
return query
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
self._execute(cursor, query)
|
||||
self._execute(cursor, 'VACUUM')
|
||||
return True
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
query,
|
||||
'COMMIT;',
|
||||
'VACUUM;',
|
||||
)),
|
||||
script=True,
|
||||
)
|
||||
return None
|
||||
|
||||
def _optimize_item_count(self, limit=-1, defer=False):
|
||||
# do nothing - optimize only if max item limit has been set
|
||||
|
|
@ -447,26 +494,71 @@ class Storage(object):
|
|||
)
|
||||
if defer:
|
||||
return query
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
query,
|
||||
'COMMIT;',
|
||||
'VACUUM;',
|
||||
)),
|
||||
script=True,
|
||||
)
|
||||
return None
|
||||
|
||||
def _set(self, item_id, item, defer=False, flush=False, memory_store=None):
|
||||
if memory_store is None:
|
||||
memory_store = getattr(self, 'memory_store', None)
|
||||
if memory_store is not None:
|
||||
if defer:
|
||||
memory_store[item_id] = item
|
||||
self._close_actions = True
|
||||
return None
|
||||
if flush:
|
||||
memory_store.clear()
|
||||
return False
|
||||
self._execute(cursor, query)
|
||||
self._execute(cursor, 'VACUUM')
|
||||
if memory_store:
|
||||
memory_store[item_id] = item
|
||||
return self._set_many(items=None, memory_store=memory_store)
|
||||
|
||||
values = self._encode(item_id, item)
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
self._sql['set'],
|
||||
'COMMIT;',
|
||||
)),
|
||||
values,
|
||||
script=True,
|
||||
)
|
||||
self._close_actions = True
|
||||
return True
|
||||
|
||||
def _set(self, item_id, item, timestamp=None):
|
||||
values = self._encode(item_id, item, timestamp)
|
||||
optimize_query = self._optimize_item_count(1, defer=True)
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
def _set_many(self,
|
||||
items,
|
||||
flatten=False,
|
||||
defer=False,
|
||||
flush=False,
|
||||
memory_store=None):
|
||||
if memory_store is None:
|
||||
memory_store = getattr(self, 'memory_store', None)
|
||||
if memory_store is not None:
|
||||
if defer:
|
||||
memory_store.update(items)
|
||||
self._close_actions = True
|
||||
return None
|
||||
if flush:
|
||||
memory_store.clear()
|
||||
return False
|
||||
if optimize_query:
|
||||
self._execute(cursor, 'BEGIN')
|
||||
self._execute(cursor, optimize_query)
|
||||
self._execute(cursor, self._sql['set'], values=values)
|
||||
return True
|
||||
if memory_store:
|
||||
if items:
|
||||
memory_store.update(items)
|
||||
items = memory_store
|
||||
flush = True
|
||||
|
||||
def _set_many(self, items, flatten=False):
|
||||
now = since_epoch()
|
||||
num_items = len(items)
|
||||
|
||||
|
|
@ -482,42 +574,73 @@ class Storage(object):
|
|||
for item in items.items()]
|
||||
query = self._sql['set']
|
||||
|
||||
optimize_query = self._optimize_item_count(num_items, defer=True)
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
if optimize_query:
|
||||
self._execute(cursor, 'BEGIN')
|
||||
self._execute(cursor, optimize_query)
|
||||
self._execute(cursor, query, many=(not flatten), values=values)
|
||||
with self as (db, cursor), db:
|
||||
if flatten:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
query,
|
||||
'COMMIT;',
|
||||
)),
|
||||
values,
|
||||
script=True,
|
||||
)
|
||||
else:
|
||||
self._execute(cursor, 'BEGIN IMMEDIATE')
|
||||
self._execute(cursor, query, many=True, values=values)
|
||||
self._close_actions = True
|
||||
|
||||
if flush:
|
||||
memory_store.clear()
|
||||
return True
|
||||
|
||||
def _refresh(self, item_id, timestamp=None):
|
||||
values = (timestamp or since_epoch(), to_str(item_id))
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
self._execute(cursor, self._sql['refresh'], values=values)
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
self._sql['refresh'],
|
||||
'COMMIT;',
|
||||
)),
|
||||
values,
|
||||
script=True,
|
||||
)
|
||||
return True
|
||||
|
||||
def _update(self, item_id, item, timestamp=None):
|
||||
values = self._encode(item_id, item, timestamp, for_update=True)
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
self._execute(cursor, self._sql['update'], values=values)
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
self._sql['update'],
|
||||
'COMMIT;',
|
||||
)),
|
||||
values,
|
||||
script=True,
|
||||
)
|
||||
return True
|
||||
|
||||
def clear(self, defer=False):
|
||||
query = self._sql['clear']
|
||||
if defer:
|
||||
return query
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
self._execute(cursor, query)
|
||||
self._execute(cursor, 'VACUUM')
|
||||
return True
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
query,
|
||||
'COMMIT;',
|
||||
'VACUUM;',
|
||||
)),
|
||||
script=True,
|
||||
)
|
||||
return None
|
||||
|
||||
def is_empty(self):
|
||||
with self as (db, cursor):
|
||||
|
|
@ -531,7 +654,10 @@ class Storage(object):
|
|||
|
||||
@staticmethod
|
||||
def _decode(obj, process=None, item=None):
|
||||
decoded_obj = pickle.loads(obj)
|
||||
if item and item[3] is None:
|
||||
decoded_obj = obj
|
||||
else:
|
||||
decoded_obj = pickle.loads(obj)
|
||||
if process:
|
||||
return process(decoded_obj, item)
|
||||
return decoded_obj
|
||||
|
|
@ -579,6 +705,10 @@ class Storage(object):
|
|||
def _get_by_ids(self, item_ids=None, oldest_first=True, limit=-1,
|
||||
wildcard=False, seconds=None, process=None,
|
||||
as_dict=False, values_only=True, excluding=None):
|
||||
epoch = since_epoch()
|
||||
cut_off = epoch - seconds if seconds else 0
|
||||
in_memory_result = None
|
||||
|
||||
if not item_ids:
|
||||
if oldest_first:
|
||||
query = self._sql['get_many']
|
||||
|
|
@ -599,58 +729,100 @@ class Storage(object):
|
|||
)
|
||||
item_ids = tuple(item_ids) + tuple(excluding)
|
||||
else:
|
||||
query = self._sql['get_by_key'].format(
|
||||
'?,' * (len(item_ids) - 1) + '?'
|
||||
)
|
||||
item_ids = tuple(item_ids)
|
||||
|
||||
epoch = since_epoch()
|
||||
cut_off = epoch - seconds if seconds else 0
|
||||
with self as (db, cursor):
|
||||
result = self._execute(cursor, query, item_ids)
|
||||
if not result:
|
||||
pass
|
||||
elif as_dict:
|
||||
if values_only:
|
||||
result = {
|
||||
item[0]: self._decode(item[2], process, item)
|
||||
for item in result if not cut_off or item[1] >= cut_off
|
||||
}
|
||||
memory_store = getattr(self, 'memory_store', None)
|
||||
if memory_store:
|
||||
in_memory_result = []
|
||||
_item_ids = []
|
||||
for key in item_ids:
|
||||
if key in memory_store:
|
||||
in_memory_result.append((
|
||||
key,
|
||||
epoch,
|
||||
memory_store[key],
|
||||
None,
|
||||
))
|
||||
else:
|
||||
_item_ids.append(key)
|
||||
item_ids = _item_ids
|
||||
else:
|
||||
result = {
|
||||
item[0]: {
|
||||
'age': epoch - item[1],
|
||||
'value': self._decode(item[2], process, item),
|
||||
}
|
||||
for item in result if not cut_off or item[1] >= cut_off
|
||||
}
|
||||
elif values_only:
|
||||
result = [
|
||||
self._decode(item[2], process, item)
|
||||
in_memory_result = None
|
||||
|
||||
if item_ids:
|
||||
query = self._sql['get_by_key'].format(
|
||||
'?,' * (len(item_ids) - 1) + '?'
|
||||
)
|
||||
item_ids = tuple(item_ids)
|
||||
else:
|
||||
query = None
|
||||
|
||||
if query:
|
||||
with self as (db, cursor):
|
||||
result = self._execute(cursor, query, item_ids)
|
||||
if result:
|
||||
result = result.fetchall()
|
||||
else:
|
||||
result = None
|
||||
|
||||
if in_memory_result:
|
||||
if result:
|
||||
in_memory_result.extend(result)
|
||||
result = in_memory_result
|
||||
|
||||
if as_dict:
|
||||
if values_only:
|
||||
result = {
|
||||
item[0]: self._decode(item[2], process, item)
|
||||
for item in result if not cut_off or item[1] >= cut_off
|
||||
]
|
||||
}
|
||||
else:
|
||||
result = [
|
||||
(item[0],
|
||||
fromtimestamp(item[1]),
|
||||
self._decode(item[2], process, item))
|
||||
result = {
|
||||
item[0]: {
|
||||
'age': epoch - item[1],
|
||||
'value': self._decode(item[2], process, item),
|
||||
}
|
||||
for item in result if not cut_off or item[1] >= cut_off
|
||||
]
|
||||
}
|
||||
elif values_only:
|
||||
result = [
|
||||
self._decode(item[2], process, item)
|
||||
for item in result if not cut_off or item[1] >= cut_off
|
||||
]
|
||||
else:
|
||||
result = [
|
||||
(item[0],
|
||||
fromtimestamp(item[1]),
|
||||
self._decode(item[2], process, item))
|
||||
for item in result if not cut_off or item[1] >= cut_off
|
||||
]
|
||||
return result
|
||||
|
||||
def _remove(self, item_id):
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
self._execute(cursor, self._sql['remove'], [item_id])
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
self._sql['remove'],
|
||||
'COMMIT;',
|
||||
)),
|
||||
[item_id],
|
||||
script=True,
|
||||
)
|
||||
return True
|
||||
|
||||
def _remove_many(self, item_ids):
|
||||
num_ids = len(item_ids)
|
||||
query = self._sql['remove_by_key'].format('?,' * (num_ids - 1) + '?')
|
||||
with self._lock as locked, self as (db, cursor), db:
|
||||
if locked:
|
||||
return False
|
||||
self._execute(cursor, query, tuple(item_ids))
|
||||
self._execute(cursor, 'VACUUM')
|
||||
with self as (db, cursor), db:
|
||||
self._execute(
|
||||
cursor,
|
||||
'\n'.join((
|
||||
'BEGIN IMMEDIATE;',
|
||||
query,
|
||||
'COMMIT;',
|
||||
'VACUUM;',
|
||||
)),
|
||||
tuple(item_ids),
|
||||
script=True,
|
||||
)
|
||||
return True
|
||||
|
|
|
|||
|
|
@ -89,23 +89,33 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
'tvSurfaceContentRenderer',
|
||||
'content',
|
||||
'sectionListRenderer',
|
||||
'contents',
|
||||
0,
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
'horizontalListRenderer',
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData',
|
||||
(
|
||||
(
|
||||
'contents',
|
||||
slice(None),
|
||||
None,
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData',
|
||||
),
|
||||
(
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData'
|
||||
)
|
||||
),
|
||||
),
|
||||
'continuation_items': (
|
||||
'continuationContents',
|
||||
'horizontalListContinuation',
|
||||
('horizontalListContinuation', 'sectionListContinuation'),
|
||||
'items',
|
||||
),
|
||||
'continuation_continuation': (
|
||||
'continuationContents',
|
||||
'horizontalListContinuation',
|
||||
('horizontalListContinuation', 'sectionListContinuation'),
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData',
|
||||
|
|
@ -200,7 +210,7 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
slice(None),
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
'horizontalListRenderer',
|
||||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'items',
|
||||
),
|
||||
'item_id': (
|
||||
|
|
@ -244,23 +254,43 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
'tvSurfaceContentRenderer',
|
||||
'content',
|
||||
'sectionListRenderer',
|
||||
'contents',
|
||||
0,
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
'horizontalListRenderer',
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData',
|
||||
(
|
||||
(
|
||||
'contents',
|
||||
slice(None),
|
||||
None,
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData',
|
||||
),
|
||||
(
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData'
|
||||
)
|
||||
),
|
||||
),
|
||||
'continuation_items': (
|
||||
'continuationContents',
|
||||
'horizontalListContinuation',
|
||||
'items',
|
||||
('horizontalListContinuation', 'sectionListContinuation'),
|
||||
(
|
||||
('items',),
|
||||
(
|
||||
'contents',
|
||||
slice(None),
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'items',
|
||||
),
|
||||
),
|
||||
),
|
||||
'continuation_continuation': (
|
||||
'continuationContents',
|
||||
'horizontalListContinuation',
|
||||
('horizontalListContinuation', 'sectionListContinuation'),
|
||||
'continuations',
|
||||
0,
|
||||
'nextContinuationData',
|
||||
|
|
@ -282,7 +312,11 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'items',
|
||||
slice(None),
|
||||
('gridVideoRenderer', 'compactVideoRenderer'),
|
||||
(
|
||||
'gridVideoRenderer',
|
||||
'compactVideoRenderer',
|
||||
'tileRenderer',
|
||||
),
|
||||
# 'videoId',
|
||||
),
|
||||
'continuation': (
|
||||
|
|
@ -307,7 +341,11 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'items',
|
||||
slice(None),
|
||||
('gridVideoRenderer', 'compactVideoRenderer'),
|
||||
(
|
||||
'gridVideoRenderer',
|
||||
'compactVideoRenderer',
|
||||
'tileRenderer',
|
||||
),
|
||||
# 'videoId',
|
||||
),
|
||||
'continuation_continuation': (
|
||||
|
|
@ -1686,7 +1724,7 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
2,
|
||||
'shelfRenderer',
|
||||
'content',
|
||||
'horizontalListRenderer',
|
||||
('horizontalListRenderer', 'verticalListRenderer'),
|
||||
'items',
|
||||
) if retry == 2 else (
|
||||
'contents',
|
||||
|
|
@ -2956,22 +2994,34 @@ class YouTubeDataClient(YouTubeLoginClient):
|
|||
message = strip_html_from_text(details.get('message', 'Unknown error'))
|
||||
|
||||
if getattr(exc, 'notify', True):
|
||||
context = self._context
|
||||
ok_dialog = False
|
||||
if reason in {'accessNotConfigured', 'forbidden'}:
|
||||
notification = self._context.localize('key.requirement')
|
||||
notification = context.localize('key.requirement')
|
||||
ok_dialog = True
|
||||
elif reason == 'keyInvalid' and message == 'Bad Request':
|
||||
notification = self._context.localize('api.key.incorrect')
|
||||
notification = context.localize('api.key.incorrect')
|
||||
elif reason in {'quotaExceeded', 'dailyLimitExceeded'}:
|
||||
notification = message
|
||||
elif reason == 'authError':
|
||||
auth_type = kwargs.get('_auth_type')
|
||||
if auth_type:
|
||||
if auth_type in self._access_tokens:
|
||||
self._access_tokens[auth_type] = None
|
||||
self.set_access_token(self._access_tokens)
|
||||
context.get_access_manager().update_access_token(
|
||||
context.get_param('addon_id'),
|
||||
access_token=self.convert_access_tokens(to_list=True),
|
||||
)
|
||||
notification = message
|
||||
else:
|
||||
notification = message
|
||||
|
||||
title = ': '.join((self._context.get_name(), reason))
|
||||
title = ': '.join((context.get_name(), reason))
|
||||
if ok_dialog:
|
||||
self._context.get_ui().on_ok(title, notification)
|
||||
context.get_ui().on_ok(title, notification)
|
||||
else:
|
||||
self._context.get_ui().show_notification(notification, title)
|
||||
context.get_ui().show_notification(notification, title)
|
||||
|
||||
info = (
|
||||
'Reason: {error_reason}',
|
||||
|
|
|
|||
|
|
@ -72,9 +72,37 @@ class YouTubeLoginClient(YouTubeRequestClient):
|
|||
def reinit(self, **kwargs):
|
||||
super(YouTubeLoginClient, self).reinit(**kwargs)
|
||||
|
||||
@classmethod
|
||||
def convert_access_tokens(cls,
|
||||
access_tokens=None,
|
||||
to_dict=False,
|
||||
to_list=False):
|
||||
if access_tokens is None:
|
||||
access_tokens = cls._access_tokens
|
||||
if to_dict or isinstance(access_tokens, (list, tuple)):
|
||||
access_tokens = {
|
||||
cls.TOKEN_TYPES[token_idx]: token
|
||||
for token_idx, token in enumerate(access_tokens)
|
||||
if token and token_idx in cls.TOKEN_TYPES
|
||||
}
|
||||
elif to_list or isinstance(access_tokens, dict):
|
||||
_access_tokens = [None, None, None, None]
|
||||
for token_type, token in access_tokens.items():
|
||||
token_idx = cls.TOKEN_TYPES.get(token_type)
|
||||
if token_idx is None:
|
||||
continue
|
||||
_access_tokens[token_idx] = token
|
||||
access_tokens = _access_tokens
|
||||
return access_tokens
|
||||
|
||||
def set_access_token(self, access_tokens=None):
|
||||
existing_access_tokens = type(self)._access_tokens
|
||||
if access_tokens:
|
||||
if isinstance(access_tokens, (list, tuple)):
|
||||
access_tokens = self.convert_access_tokens(
|
||||
access_tokens,
|
||||
to_dict=True,
|
||||
)
|
||||
token_status = 0
|
||||
for token_type, token in existing_access_tokens.items():
|
||||
if token_type in access_tokens:
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ from base64 import urlsafe_b64encode
|
|||
from json import dumps as json_dumps, loads as json_loads
|
||||
from os import path as os_path
|
||||
from random import choice as random_choice
|
||||
from re import compile as re_compile
|
||||
from re import compile as re_compile, sub as re_sub
|
||||
|
||||
from .data_client import YouTubeDataClient
|
||||
from .subtitles import SUBTITLE_SELECTIONS, Subtitles
|
||||
|
|
@ -852,7 +852,6 @@ class YouTubePlayerClient(YouTubeDataClient):
|
|||
self._client_groups = (
|
||||
('custom', clients if clients else ()),
|
||||
('auth_enabled|initial_request|no_playable_streams', (
|
||||
'tv_embed',
|
||||
'tv_unplugged',
|
||||
'tv',
|
||||
)),
|
||||
|
|
@ -1136,12 +1135,22 @@ class YouTubePlayerClient(YouTubeDataClient):
|
|||
|
||||
headers = response['client']['headers']
|
||||
|
||||
if '?' in url:
|
||||
url += '&mpd_version=5'
|
||||
elif url.endswith('/'):
|
||||
url += 'mpd_version/5'
|
||||
url_components = urlsplit(url)
|
||||
if url_components.query:
|
||||
params = dict(parse_qs(url_components.query))
|
||||
params['mpd_version'] = ['7']
|
||||
url = url_components._replace(
|
||||
query=urlencode(params, doseq=True),
|
||||
).geturl()
|
||||
else:
|
||||
url += '/mpd_version/5'
|
||||
path = re_sub(
|
||||
r'/mpd_version/\d+|/?$',
|
||||
'/mpd_version/7',
|
||||
url_components.path,
|
||||
)
|
||||
url = url_components._replace(
|
||||
path=path,
|
||||
).geturl()
|
||||
|
||||
stream_list[itag] = self._get_stream_format(
|
||||
itag=itag,
|
||||
|
|
@ -1541,7 +1550,7 @@ class YouTubePlayerClient(YouTubeDataClient):
|
|||
'_visitor_data': self._visitor_data[self._visitor_data_key],
|
||||
}
|
||||
|
||||
for client_name in ('tv_embed', 'web'):
|
||||
for client_name in ('tv_unplugged', 'web'):
|
||||
client = self.build_client(client_name, client_data)
|
||||
if not client:
|
||||
continue
|
||||
|
|
|
|||
|
|
@ -803,26 +803,21 @@ class YouTubeRequestClient(BaseRequestsClass):
|
|||
|
||||
if isinstance(keys, slice):
|
||||
next_key = path[idx + 1]
|
||||
parts = result[keys]
|
||||
if next_key is None:
|
||||
for part in result[keys]:
|
||||
new_result = cls.json_traverse(
|
||||
part,
|
||||
path[idx + 2:],
|
||||
default=default,
|
||||
)
|
||||
new_path = path[idx + 2:]
|
||||
for part in parts:
|
||||
new_result = cls.json_traverse(part, new_path, default)
|
||||
if not new_result or new_result == default:
|
||||
continue
|
||||
return new_result
|
||||
|
||||
if isinstance(next_key, range_type):
|
||||
results_limit = len(next_key)
|
||||
new_path = path[idx + 2:]
|
||||
new_results = []
|
||||
for part in result[keys]:
|
||||
new_result = cls.json_traverse(
|
||||
part,
|
||||
path[idx + 2:],
|
||||
default=default,
|
||||
)
|
||||
for part in parts:
|
||||
new_result = cls.json_traverse(part, new_path, default)
|
||||
if not new_result or new_result == default:
|
||||
continue
|
||||
new_results.append(new_result)
|
||||
|
|
@ -831,9 +826,10 @@ class YouTubeRequestClient(BaseRequestsClass):
|
|||
break
|
||||
results_limit -= 1
|
||||
else:
|
||||
new_path = path[idx + 1:]
|
||||
new_results = [
|
||||
cls.json_traverse(part, path[idx + 1:], default=default)
|
||||
for part in result[keys]
|
||||
cls.json_traverse(part, new_path, default)
|
||||
for part in parts
|
||||
if part
|
||||
]
|
||||
return new_results
|
||||
|
|
@ -843,7 +839,7 @@ class YouTubeRequestClient(BaseRequestsClass):
|
|||
|
||||
for key in keys:
|
||||
if isinstance(key, tuple):
|
||||
new_result = cls.json_traverse(result, key, default=default)
|
||||
new_result = cls.json_traverse(result, key, default)
|
||||
if new_result:
|
||||
result = new_result
|
||||
break
|
||||
|
|
|
|||
|
|
@ -103,26 +103,33 @@ class Subtitles(YouTubeRequestClient):
|
|||
|
||||
use_isa = not self.pre_download and use_mpd
|
||||
self.use_isa = use_isa
|
||||
default_format = None
|
||||
fallback_format = None
|
||||
if use_isa:
|
||||
if ('ttml' in stream_features
|
||||
and context.inputstream_adaptive_capabilities('ttml')):
|
||||
self.FORMATS['_default'] = 'ttml'
|
||||
self.FORMATS['_fallback'] = 'ttml'
|
||||
default_format = 'ttml'
|
||||
fallback_format = 'ttml'
|
||||
|
||||
if context.inputstream_adaptive_capabilities('vtt'):
|
||||
if 'vtt' in stream_features:
|
||||
self.FORMATS.setdefault('_default', 'vtt')
|
||||
self.FORMATS['_fallback'] = 'vtt'
|
||||
default_format = default_format or 'vtt'
|
||||
fallback_format = 'vtt'
|
||||
else:
|
||||
self.FORMATS.setdefault('_default', 'srt')
|
||||
self.FORMATS['_fallback'] = 'srt'
|
||||
else:
|
||||
default_format = default_format or 'srt'
|
||||
fallback_format = 'srt'
|
||||
|
||||
if not default_format or not use_isa:
|
||||
if ('vtt' in stream_features
|
||||
and context.get_system_version().compatible(20)):
|
||||
self.FORMATS['_default'] = 'vtt'
|
||||
self.FORMATS['_fallback'] = 'vtt'
|
||||
default_format = 'vtt'
|
||||
fallback_format = 'vtt'
|
||||
else:
|
||||
self.FORMATS['_default'] = 'srt'
|
||||
self.FORMATS['_fallback'] = 'srt'
|
||||
default_format = 'srt'
|
||||
fallback_format = 'srt'
|
||||
|
||||
self.FORMATS['_default'] = default_format
|
||||
self.FORMATS['_fallback'] = fallback_format
|
||||
|
||||
kodi_sub_lang = context.get_subtitle_language()
|
||||
plugin_lang = settings.get_language()
|
||||
|
|
@ -451,7 +458,6 @@ class Subtitles(YouTubeRequestClient):
|
|||
|
||||
subtitle_url = self._set_query_param(
|
||||
base_url,
|
||||
('type', 'track'),
|
||||
('fmt', sub_format),
|
||||
('tlang', tlang),
|
||||
('xosf', None),
|
||||
|
|
|
|||
|
|
@ -100,7 +100,6 @@ class ResourceManager(object):
|
|||
result = data_cache.get_items(
|
||||
ids,
|
||||
None if forced_cache else data_cache.ONE_DAY,
|
||||
memory_store=self.new_data,
|
||||
)
|
||||
to_update = (
|
||||
[]
|
||||
|
|
@ -194,7 +193,6 @@ class ResourceManager(object):
|
|||
result.update(data_cache.get_items(
|
||||
to_check,
|
||||
None if forced_cache else data_cache.ONE_MONTH,
|
||||
memory_store=self.new_data,
|
||||
))
|
||||
to_update = (
|
||||
[]
|
||||
|
|
@ -305,7 +303,6 @@ class ResourceManager(object):
|
|||
result = data_cache.get_items(
|
||||
ids,
|
||||
None if forced_cache else data_cache.ONE_DAY,
|
||||
memory_store=self.new_data,
|
||||
)
|
||||
to_update = (
|
||||
[]
|
||||
|
|
@ -578,7 +575,6 @@ class ResourceManager(object):
|
|||
result = data_cache.get_items(
|
||||
ids,
|
||||
None if forced_cache else data_cache.ONE_MONTH,
|
||||
memory_store=self.new_data,
|
||||
)
|
||||
to_update = (
|
||||
[]
|
||||
|
|
@ -658,33 +654,25 @@ class ResourceManager(object):
|
|||
return result
|
||||
|
||||
def cache_data(self, data=None, defer=False):
|
||||
if defer:
|
||||
if data:
|
||||
self.new_data.update(data)
|
||||
return
|
||||
if not data:
|
||||
return None
|
||||
|
||||
if self.new_data:
|
||||
flush = True
|
||||
if data:
|
||||
self.new_data.update(data)
|
||||
data = self.new_data
|
||||
else:
|
||||
flush = False
|
||||
if data:
|
||||
if self._incognito:
|
||||
self.log.debugging and self.log.debug(
|
||||
('Incognito mode active - discarded data for {num} item(s)',
|
||||
'IDs: {ids}'),
|
||||
num=len(data),
|
||||
ids=list(data),
|
||||
)
|
||||
else:
|
||||
self.log.debugging and self.log.debug(
|
||||
('Storing new data to cache for {num} item(s)',
|
||||
'IDs: {ids}'),
|
||||
num=len(data),
|
||||
ids=list(data),
|
||||
)
|
||||
self._context.get_data_cache().set_items(data)
|
||||
if flush:
|
||||
self.new_data = {}
|
||||
incognito = self._incognito
|
||||
if not defer and self.log.debugging:
|
||||
self.log.debug(
|
||||
(
|
||||
'Incognito mode active - discarded data for {num} item(s)',
|
||||
'IDs: {ids}'
|
||||
) if incognito else (
|
||||
'Storing new data to cache for {num} item(s)',
|
||||
'IDs: {ids}'
|
||||
),
|
||||
num=len(data),
|
||||
ids=list(data)
|
||||
)
|
||||
|
||||
return self._context.get_data_cache().set_items(
|
||||
data,
|
||||
defer=defer,
|
||||
flush=incognito,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -120,15 +120,14 @@ def _process_list_response(provider,
|
|||
item_params = yt_item.get('_params') or {}
|
||||
item_params.update(new_params)
|
||||
|
||||
item_id = None
|
||||
item_id = yt_item.get('id')
|
||||
snippet = yt_item.get('snippet', {})
|
||||
|
||||
video_id = None
|
||||
playlist_id = None
|
||||
channel_id = None
|
||||
|
||||
if is_youtube:
|
||||
item_id = yt_item.get('id')
|
||||
snippet = yt_item.get('snippet', {})
|
||||
|
||||
localised_info = snippet.get('localized') or {}
|
||||
title = (localised_info.get('title')
|
||||
or snippet.get('title')
|
||||
|
|
|
|||
|
|
@ -111,8 +111,9 @@ def process_default_settings(context, step, steps, **_kwargs):
|
|||
background=False,
|
||||
) as progress_dialog:
|
||||
progress_dialog.update()
|
||||
if settings.httpd_listen() == '0.0.0.0':
|
||||
settings.httpd_listen('127.0.0.1')
|
||||
ip_address = settings.httpd_listen()
|
||||
if ip_address == '0.0.0.0':
|
||||
ip_address = settings.httpd_listen('127.0.0.1')
|
||||
if not httpd_status(context):
|
||||
port = settings.httpd_port()
|
||||
addresses = get_listen_addresses()
|
||||
|
|
@ -120,13 +121,17 @@ def process_default_settings(context, step, steps, **_kwargs):
|
|||
for address in addresses:
|
||||
progress_dialog.update()
|
||||
if httpd_status(context, (address, port)):
|
||||
settings.httpd_listen(address)
|
||||
ip_address = settings.httpd_listen(address)
|
||||
break
|
||||
context.sleep(5)
|
||||
context.sleep(3)
|
||||
else:
|
||||
ui.show_notification(localize('httpd.connect.failed'),
|
||||
header=localize('httpd'))
|
||||
settings.httpd_listen('0.0.0.0')
|
||||
ip_address = None
|
||||
if ip_address:
|
||||
ui.on_ok(context.get_name(),
|
||||
context.localize('client.ip.is.x', ip_address))
|
||||
return step
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -313,14 +313,7 @@ class Provider(AbstractProvider):
|
|||
access_token='',
|
||||
refresh_token=refresh_token,
|
||||
)
|
||||
|
||||
client.set_access_token({
|
||||
client.TOKEN_TYPES[idx]: token
|
||||
for idx, token in enumerate(access_tokens)
|
||||
if token
|
||||
|
||||
})
|
||||
|
||||
client.set_access_token(access_tokens)
|
||||
return client
|
||||
|
||||
def get_resource_manager(self, context, progress_dialog=None):
|
||||
|
|
|
|||
|
|
@ -243,6 +243,7 @@
|
|||
</constraints>
|
||||
<control format="integer" type="slider">
|
||||
<popup>false</popup>
|
||||
<formatlabel>21436</formatlabel>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="youtube.view.hide_videos" type="list[string]" label="30808" help="">
|
||||
|
|
@ -757,6 +758,7 @@
|
|||
</constraints>
|
||||
<control format="integer" type="slider">
|
||||
<popup>false</popup>
|
||||
<formatlabel>37122</formatlabel>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="kodion.search.size" type="integer" label="30023" help="">
|
||||
|
|
@ -769,6 +771,7 @@
|
|||
</constraints>
|
||||
<control format="integer" type="slider">
|
||||
<popup>false</popup>
|
||||
<formatlabel>21436</formatlabel>
|
||||
</control>
|
||||
</setting>
|
||||
</group>
|
||||
|
|
@ -793,6 +796,7 @@
|
|||
</constraints>
|
||||
<control format="integer" type="slider">
|
||||
<popup>false</popup>
|
||||
<formatlabel>14045</formatlabel>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="youtube.view.filter.list" type="string" label="587" help="30583">
|
||||
|
|
@ -968,6 +972,7 @@
|
|||
</constraints>
|
||||
<control format="integer" type="slider">
|
||||
<popup>false</popup>
|
||||
<formatlabel>14047</formatlabel>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="youtube.playlist.watchlater.autoremove" type="boolean" label="30515" help="">
|
||||
|
|
@ -1023,6 +1028,7 @@
|
|||
</constraints>
|
||||
<control format="integer" type="slider">
|
||||
<popup>false</popup>
|
||||
<formatlabel>37122</formatlabel>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="requests.proxy.source" type="integer" label="713" help="36380">
|
||||
|
|
@ -1110,14 +1116,14 @@
|
|||
</setting>
|
||||
</group>
|
||||
<group id="http_server" label="30628">
|
||||
<setting id="kodion.http.listen" type="string" label="30643" help="">
|
||||
<setting id="kodion.http.listen" type="string" label="1006" help="">
|
||||
<level>0</level>
|
||||
<default>127.0.0.1</default>
|
||||
<control format="ip" type="edit">
|
||||
<heading>30643</heading>
|
||||
<heading>14068</heading>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="kodion.http.listen.select" type="action" label="30644" help="">
|
||||
<setting id="kodion.http.listen.select" type="action" parent="kodion.view.override" label="30644" help="">
|
||||
<level>0</level>
|
||||
<constraints>
|
||||
<allowempty>true</allowempty>
|
||||
|
|
@ -1127,7 +1133,15 @@
|
|||
<close>true</close>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="kodion.http.port" type="integer" label="730" help="">
|
||||
<setting id="kodion.http.client.ip" type="action" parent="kodion.view.override" label="30698" help="">
|
||||
<level>0</level>
|
||||
<constraints>
|
||||
<allowempty>true</allowempty>
|
||||
</constraints>
|
||||
<data>RunScript($ID,config/show_client_ip)</data>
|
||||
<control format="action" type="button"/>
|
||||
</setting>
|
||||
<setting id="kodion.http.port" type="integer" label="1013" help="">
|
||||
<level>0</level>
|
||||
<default>50152</default>
|
||||
<constraints>
|
||||
|
|
@ -1135,7 +1149,7 @@
|
|||
<maximum>65535</maximum>
|
||||
</constraints>
|
||||
<control format="integer" type="edit">
|
||||
<heading>730</heading>
|
||||
<heading>1018</heading>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="kodion.http.ip.whitelist" type="string" label="30629" help="">
|
||||
|
|
@ -1148,14 +1162,6 @@
|
|||
<heading>30629</heading>
|
||||
</control>
|
||||
</setting>
|
||||
<setting id="kodion.http.client.ip" type="action" label="30698" help="">
|
||||
<level>0</level>
|
||||
<constraints>
|
||||
<allowempty>true</allowempty>
|
||||
</constraints>
|
||||
<data>RunScript($ID,config/show_client_ip)</data>
|
||||
<control format="action" type="button"/>
|
||||
</setting>
|
||||
<setting id="youtube.http.idle_sleep" type="boolean" label="13018" help="">
|
||||
<level>0</level>
|
||||
<default>true</default>
|
||||
|
|
|
|||