2017-04-09 11:31:03 +10:00
|
|
|
# Gnu General Public License - see LICENSE.TXT
|
2020-09-05 17:29:38 -04:00
|
|
|
from __future__ import division, absolute_import, print_function, unicode_literals
|
2017-04-09 11:31:03 +10:00
|
|
|
|
2017-12-28 13:11:18 +11:00
|
|
|
from collections import defaultdict
|
2018-11-11 15:08:07 +11:00
|
|
|
import threading
|
|
|
|
|
import hashlib
|
|
|
|
|
import os
|
2018-11-21 13:23:42 +11:00
|
|
|
import time
|
2021-01-02 09:52:37 -05:00
|
|
|
from six.moves import cPickle
|
2014-10-30 14:29:19 +11:00
|
|
|
|
2019-01-11 10:24:42 +11:00
|
|
|
from .downloadutils import DownloadUtils
|
2020-07-25 01:01:30 -04:00
|
|
|
from .loghandler import LazyLogger
|
2019-01-11 10:24:42 +11:00
|
|
|
from .item_functions import extract_item_info
|
2018-11-11 16:32:25 +11:00
|
|
|
from .kodi_utils import HomeWindow
|
2019-03-03 10:43:56 +11:00
|
|
|
from .translation import string_load
|
2019-10-14 11:55:18 +11:00
|
|
|
from .tracking import timer
|
2020-07-03 15:38:37 +10:00
|
|
|
from .filelock import FileLock
|
2018-11-11 15:08:07 +11:00
|
|
|
|
|
|
|
|
import xbmc
|
|
|
|
|
import xbmcaddon
|
2019-03-03 10:43:56 +11:00
|
|
|
import xbmcvfs
|
|
|
|
|
import xbmcgui
|
2014-10-30 14:29:19 +11:00
|
|
|
|
2020-07-25 01:01:30 -04:00
|
|
|
log = LazyLogger(__name__)
|
2017-03-10 10:45:38 +11:00
|
|
|
|
2018-11-21 13:23:42 +11:00
|
|
|
|
2019-03-03 10:43:56 +11:00
|
|
|
class CacheItem:
    """Pickle-serialized container for one cached item-list query.

    Instances are written to / read from ``cache_<hash>.pickle`` files by
    ``DataManager.get_items`` and ``CacheManagerThread.run``. All state is
    assigned attribute-by-attribute after construction (and unpickling
    bypasses ``__init__`` entirely), so the class body only declares the
    fields with ``None`` defaults.
    """

    # Extracted item objects returned by the server query.
    item_list = None
    # md5 hex digest of item_list; used to detect server-side changes.
    item_list_hash = None
    # Epoch time the pickle was last written.
    date_saved = None
    # Epoch time this cache entry was last served to a caller.
    date_last_used = None
    # Either "fresh_data" (just loaded from the server) or "cached_data".
    last_action = None
    # Server URL the item list was loaded from.
    items_url = None
    # Full path of the pickle file this item is stored in.
    file_path = None
    # User the query was made for (part of the cache key).
    user_id = None
    # TotalRecordCount reported by the server for the query. Declared here
    # for consistency with the other fields: it was previously only ever set
    # dynamically (DataManager.get_items / CacheManagerThread.run), so a
    # CacheItem pickled before the field was introduced would raise
    # AttributeError on access.
    total_records = None

    def __init__(self, *args):
        # Nothing to initialise: attributes are assigned post-construction,
        # and cPickle restores instances without calling __init__.
        pass
|
2019-03-03 10:43:56 +11:00
|
|
|
class DataManager:
    """Loads item lists from the media server, backed by an on-disk pickle
    cache keyed by md5(user_id | server | url).

    ``get_items`` returns the cached list when available and hands back a
    ``CacheManagerThread`` which the caller may start to re-validate the
    cache against the server in the background.
    """

    # Kodi profile directory for this addon; the cache_*.pickle files live here.
    addon_dir = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('profile'))

    def __init__(self, *args):
        pass

    @timer
    def get_content(self, url):
        """Download *url* and return the parsed response.

        Thin wrapper so get_items() and CacheManagerThread share a single
        download path (return value comes straight from
        DownloadUtils.download_url — presumably parsed JSON or None on
        failure; verify against DownloadUtils).
        """
        return DownloadUtils().download_url(url)

    @timer
    def get_items(self, url, gui_options, use_cache=False):
        """Return the item list for *url*, from cache when possible.

        :param url: server query URL for the item list.
        :param gui_options: options dict passed through to extract_item_info().
        :param use_cache: when True, serve pickled data if present and return
            a CacheManagerThread for background re-validation; when False,
            always load from the server and return None for the thread.
        :return: tuple (cache_file, item_list, total_records, cache_thread).
        """
        home_window = HomeWindow()
        log.debug("last_content_url : use_cache={0} url={1}".format(use_cache, url))
        # Remember the most recent content URL (read elsewhere in the addon
        # via the home window property store).
        home_window.set_property("last_content_url", url)

        download_utils = DownloadUtils()
        user_id = download_utils.get_user_id()
        server = download_utils.get_server()

        # Cache key: same URL for a different user or server must not collide.
        m = hashlib.md5()
        m.update('{}|{}|{}'.format(user_id, server, url).encode())
        url_hash = m.hexdigest()
        cache_file = os.path.join(self.addon_dir, "cache_" + url_hash + ".pickle")

        item_list = None
        total_records = 0
        baseline_name = None
        # Created unconditionally so we can attach the CacheItem below; it is
        # discarded (set to None) at the end when use_cache is False.
        cache_thread = CacheManagerThread()
        cache_thread.gui_options = gui_options

        # Flag this cache file as in-use via a window property (keyed by the
        # file path itself; consumers elsewhere check this flag).
        home_window.set_property(cache_file, "true")

        # A one-shot "skip cache" request for this URL forces a fresh load.
        clear_cache = home_window.get_property("skip_cache_for_" + url)
        if clear_cache and os.path.isfile(cache_file):
            log.debug("Clearing cache data and loading new data")
            home_window.clear_property("skip_cache_for_" + url)
            os.remove(cache_file)

        # try to load the list item data from the cache
        if os.path.isfile(cache_file) and use_cache:
            log.debug("Loading url data from cached pickle data")

            # Lock guards against CacheManagerThread writing the same file.
            with FileLock(cache_file + ".locked", timeout=5):
                with open(cache_file, 'rb') as handle:
                    try:
                        cache_item = cPickle.load(handle)
                        cache_thread.cached_item = cache_item
                        item_list = cache_item.item_list
                        total_records = cache_item.total_records
                    except Exception as err:
                        # Corrupt/stale pickle: fall through to a server load.
                        log.error("Pickle Data Load Failed : {0}".format(err))
                        item_list = None

        # we need to load the list item data from the server
        # (cache missing, disabled, unreadable, or empty)
        if item_list is None or len(item_list) == 0:
            log.debug("Loading url data from server")

            results = self.get_content(url)

            if results is None:
                results = []

            if isinstance(results, dict):
                total_records = results.get("TotalRecordCount", 0)

            # The server may wrap the list either as {"Items": [...]} or as
            # [{"Items": [...]}]; unwrap both shapes.
            if isinstance(results, dict) and results.get("Items") is not None:
                baseline_name = results.get("BaselineItemName")
                results = results.get("Items", [])
            elif isinstance(results, list) and len(results) > 0 and results[0].get("Items") is not None:
                baseline_name = results[0].get("BaselineItemName")
                results = results[0].get("Items")

            item_list = []
            for item in results:
                item_data = extract_item_info(item, gui_options)
                item_data.baseline_itemname = baseline_name
                item_list.append(item_data)

            # Package the fresh data for the background thread to persist.
            cache_item = CacheItem()
            cache_item.item_list = item_list
            cache_item.file_path = cache_file
            cache_item.items_url = url
            cache_item.user_id = user_id
            cache_item.last_action = "fresh_data"
            cache_item.date_saved = time.time()
            cache_item.date_last_used = time.time()
            cache_item.total_records = total_records

            cache_thread.cached_item = cache_item

        # No caching requested: nothing for a background thread to do.
        if not use_cache:
            cache_thread = None

        return cache_file, item_list, total_records, cache_thread
|
|
|
|
|
class CacheManagerThread(threading.Thread):
    """Background worker that keeps one cache pickle in sync with the server.

    DataManager.get_items() attaches a CacheItem (and gui_options) before
    starting the thread. run() either persists fresh data directly, or
    re-downloads the item list, compares content hashes, and on a mismatch
    rewrites the pickle and triggers a Kodi container refresh so the UI
    picks up the change.
    """

    # CacheItem to verify/persist; must be set by the creator before start().
    cached_item = None
    # Options dict passed through to extract_item_info(); set by the creator.
    gui_options = None

    def __init__(self, *args):
        threading.Thread.__init__(self, *args)

    @staticmethod
    def get_data_hash(items):
        """Return an md5 hex digest over the display-relevant fields of
        *items*, so any server-side change (name, played state, favorite,
        resume point, unplayed count, etag) yields a different hash."""
        m = hashlib.md5()
        for item in items:
            item_string = "%s_%s_%s_%s_%s_%s" % (
                item.name,
                item.play_count,
                item.favorite,
                item.resume_time,
                item.recursive_unplayed_items_count,
                item.etag
            )
            item_string = item_string.encode("UTF-8")
            m.update(item_string)

        return m.hexdigest()

    def run(self):
        log.debug("CacheManagerThread : Started")

        home_window = HomeWindow()
        is_fresh = False

        # if the data is fresh then just save it
        # if the data is too old, re-download and re-check hashes instead
        # ("fresh" = loaded from the server less than 20 seconds ago)
        if (self.cached_item.date_saved is not None
                and (time.time() - self.cached_item.date_saved) < 20
                and self.cached_item.last_action == "fresh_data"):
            is_fresh = True

        if is_fresh and self.cached_item.item_list is not None and len(self.cached_item.item_list) > 0:
            log.debug("CacheManagerThread : Saving fresh data")

            # Stamp the data with its hash so a later run can compare it
            # against a re-download without reloading the old list.
            cached_hash = self.get_data_hash(self.cached_item.item_list)
            self.cached_item.item_list_hash = cached_hash
            self.cached_item.last_action = "cached_data"
            self.cached_item.date_saved = time.time()
            self.cached_item.date_last_used = time.time()

            log.debug("CacheManagerThread : Saving New Data loops")

            # Lock guards against DataManager reading the file mid-write.
            with FileLock(self.cached_item.file_path + ".locked", timeout=5):
                with open(self.cached_item.file_path, 'wb') as handle:
                    cPickle.dump(self.cached_item, handle, protocol=cPickle.HIGHEST_PROTOCOL)

        else:
            log.debug("CacheManagerThread : Reloading to recheck data hashes")

            cached_hash = self.cached_item.item_list_hash
            log.debug("CacheManagerThread : Cache Hash : {0}".format(cached_hash))

            # Re-download the same URL and rebuild the item list.
            data_manager = DataManager()
            results = data_manager.get_content(self.cached_item.items_url)
            if results is None:
                results = []

            # Same response unwrapping as DataManager.get_items:
            # {"Items": [...]} or [{"Items": [...]}].
            if isinstance(results, dict) and results.get("Items") is not None:
                results = results.get("Items", [])
            elif isinstance(results, list) and len(results) > 0 and results[0].get("Items") is not None:
                results = results[0].get("Items")

            total_records = 0
            if isinstance(results, dict):
                total_records = results.get("TotalRecordCount", 0)

            loaded_items = []
            for item in results:
                item_data = extract_item_info(item, self.gui_options)
                loaded_items.append(item_data)

            # Never overwrite a populated cache with an empty server reply
            # (e.g. a transient download failure).
            if loaded_items is None or len(loaded_items) == 0:
                log.debug("CacheManagerThread : loaded_items is None or Empty so not saving it")
                return

            loaded_hash = self.get_data_hash(loaded_items)
            log.debug("CacheManagerThread : Loaded Hash : {0}".format(loaded_hash))

            # if they dont match then save the data and trigger a content reload
            if cached_hash != loaded_hash:
                log.debug("CacheManagerThread : Hashes different, saving new data and reloading container")

                self.cached_item.item_list = loaded_items
                self.cached_item.item_list_hash = loaded_hash
                self.cached_item.last_action = "fresh_data"
                self.cached_item.date_saved = time.time()
                self.cached_item.date_last_used = time.time()
                self.cached_item.total_records = total_records

                with FileLock(self.cached_item.file_path + ".locked", timeout=5):
                    with open(self.cached_item.file_path, 'wb') as handle:
                        cPickle.dump(self.cached_item, handle, protocol=cPickle.HIGHEST_PROTOCOL)

                # TODO: probably should only set this in simple check mode
                # Bump the widget-reload stamp so home-screen widgets refresh.
                current_time_stamp = str(time.time())
                home_window.set_property("jellycon_widget_reload", current_time_stamp)
                log.debug("Setting New Widget Hash: {0}".format(current_time_stamp))

                log.debug("CacheManagerThread : Sending container refresh")
                xbmc.executebuiltin("Container.Refresh")

            else:
                # Data unchanged: just refresh the last-used stamp so the
                # old-cache cleanup does not expire an active entry.
                self.cached_item.date_last_used = time.time()
                with FileLock(self.cached_item.file_path + ".locked", timeout=5):
                    with open(self.cached_item.file_path, 'wb') as handle:
                        cPickle.dump(self.cached_item, handle, protocol=cPickle.HIGHEST_PROTOCOL)
                log.debug("CacheManagerThread : Updating last used date for cache data")

        log.debug("CacheManagerThread : Exited")
2019-03-03 10:43:56 +11:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def clear_cached_server_data():
    """Delete every cache pickle from the addon profile directory, then show
    a confirmation dialog to the user."""
    log.debug("clear_cached_server_data() called")

    profile_path = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('profile'))
    _, file_names = xbmcvfs.listdir(profile_path)

    deleted = 0
    for name in file_names:
        # only the cache pickles; leave other profile files alone
        if not (name.startswith("cache_") and name.endswith(".pickle")):
            continue
        log.debug("Deleteing CacheFile: {0}".format(name))
        xbmcvfs.delete(os.path.join(profile_path, name))
        deleted += 1

    log.debug('Deleted {} files'.format(deleted))

    msg = string_load(30394)
    xbmcgui.Dialog().ok(string_load(30393), msg)
|
2019-07-07 20:04:19 +10:00
|
|
|
def _delete_locked_cache_file(data_file):
    """Delete one cache pickle while holding its companion lock file, so a
    concurrent CacheManagerThread save cannot race the delete."""
    with FileLock(data_file + ".locked", timeout=5):
        xbmcvfs.delete(data_file)


def clear_old_cache_data():
    """Purge cache pickles that are unloadable or unused for over a week.

    Scans the addon profile directory for cache_*.pickle files; each is
    loaded (with retries, under its file lock) and deleted when it cannot be
    unpickled, has no last-used stamp, or was last used more than seven days
    ago. Logs the number of files deleted.
    """
    log.debug("clear_old_cache_data() : called")

    addon_dir = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('profile'))
    dirs, files = xbmcvfs.listdir(addon_dir)

    del_count = 0
    for filename in files:
        if filename.startswith("cache_") and filename.endswith(".pickle"):
            log.debug("clear_old_cache_data() : Checking CacheFile : {0}".format(filename))

            data_file = os.path.join(addon_dir, filename)

            # Retry the load a few times: another thread may briefly hold
            # the lock or be mid-write when we first look at the file.
            cache_item = None
            for x in range(0, 5):
                try:
                    with FileLock(data_file + ".locked", timeout=5):
                        with open(data_file, 'rb') as handle:
                            cache_item = cPickle.load(handle)
                    break
                except Exception as error:
                    log.debug("clear_old_cache_data() : Pickle load error : {0}".format(error))
                    cache_item = None
                    xbmc.sleep(1000)

            if cache_item is not None:
                # Seconds since the entry was last served; -1 means unknown.
                item_last_used = -1
                if cache_item.date_last_used is not None:
                    item_last_used = time.time() - cache_item.date_last_used

                log.debug("clear_old_cache_data() : Cache item last used : {0} sec ago".format(item_last_used))

                # Expire entries never stamped with a last-used date or idle
                # for more than seven days.
                if item_last_used == -1 or item_last_used > (3600 * 24 * 7):
                    log.debug("clear_old_cache_data() : Deleting cache item age : {0}".format(item_last_used))
                    _delete_locked_cache_file(data_file)
                    del_count += 1
            else:
                log.debug("clear_old_cache_data() : Deleting unloadable cache item")
                _delete_locked_cache_file(data_file)
                # BUGFIX: this branch previously deleted the file without
                # counting it, so the final tally under-reported deletions.
                del_count += 1

    log.debug("clear_old_cache_data() : Cache items deleted : {0}".format(del_count))