Files
jellycon/resources/lib/datamanager.py

325 lines
12 KiB
Python
Raw Normal View History

# Gnu General Public License - see LICENSE.TXT
import json
2017-12-28 13:11:18 +11:00
from collections import defaultdict
2018-11-11 15:08:07 +11:00
import threading
import hashlib
import os
import cPickle
2018-11-21 13:23:42 +11:00
import time
2014-10-30 14:29:19 +11:00
2019-01-11 10:24:42 +11:00
from .downloadutils import DownloadUtils
from .simple_logging import SimpleLogging
from .item_functions import extract_item_info
from .kodi_utils import HomeWindow
2019-03-03 10:43:56 +11:00
from .translation import string_load
2019-10-14 11:55:18 +11:00
from .tracking import timer
from .filelock import FileLock
2018-11-11 15:08:07 +11:00
import xbmc
import xbmcaddon
2019-03-03 10:43:56 +11:00
import xbmcvfs
import xbmcgui
2014-10-30 14:29:19 +11:00
log = SimpleLogging(__name__)
2017-03-10 10:45:38 +11:00
2018-11-21 13:23:42 +11:00
2019-03-03 10:43:56 +11:00
class CacheItem:
    """Pickled cache record for one item-list query.

    One CacheItem is written per (user, server, url) hash by
    CacheManagerThread and read back by DataManager.get_items().
    Class-level defaults also act as fallbacks when unpickling records
    written by older addon versions that lack an attribute.
    """

    # Extracted item-info objects for the cached URL.
    item_list = None
    # md5 over the significant item fields (see CacheManagerThread.get_data_hash),
    # used to detect server-side changes.
    item_list_hash = None
    # Epoch timestamps for cache freshness / cleanup decisions.
    date_saved = None
    date_last_used = None

    # Either "fresh_data" (just downloaded) or "cached_data" (persisted).
    last_action = None
    # Request context needed to re-fetch and revalidate this cache entry.
    items_url = None
    file_path = None
    user_id = None
    # Total record count reported by the server. BUGFIX: previously assigned
    # ad hoc by callers but never declared, so instances loaded from older
    # pickles could raise AttributeError when it was read.
    total_records = 0

    def __init__(self, *args):
        # *args accepted for historical compatibility; nothing to initialise,
        # callers assign the attributes directly.
        pass
2019-03-03 10:43:56 +11:00
class DataManager:
    """Loads item lists from the server with a per-query pickle-file cache.

    get_items() returns cached data immediately when available, together
    with an unstarted CacheManagerThread the caller can run to revalidate
    the cache in the background.
    """

    # Addon profile directory where cache_<hash>.pickle files live.
    addon_dir = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('profile'))

    def __init__(self, *args):
        # log.debug("DataManager __init__")
        pass

    @staticmethod
    def load_json_data(json_data):
        # defaultdict(lambda: None) makes missing keys read as None instead
        # of raising KeyError throughout the item-extraction code.
        return json.loads(json_data, object_hook=lambda d: defaultdict(lambda: None, d))

    @timer
    def get_content(self, url):
        """Download url and return the parsed JSON (None-defaulting dicts)."""
        json_data = DownloadUtils().download_url(url)
        result = self.load_json_data(json_data)
        return result

    @timer
    def get_items(self, url, gui_options, use_cache=False):
        """Return (cache_file, item_list, total_records, cache_thread) for url.

        When use_cache is True and a readable pickle exists, cached items are
        returned and cache_thread (not yet started) can revalidate them in the
        background; otherwise the data is fetched from the server.
        cache_thread is None when use_cache is False.
        """
        home_window = HomeWindow()

        log.debug("last_content_url : use_cache={0} url={1}", use_cache, url)
        home_window.set_property("last_content_url", url)

        download_utils = DownloadUtils()
        user_id = download_utils.get_user_id()
        server = download_utils.get_server()

        # Cache files are keyed on user + server + url so different users
        # and servers never share cached lists.
        m = hashlib.md5()
        m.update(user_id + "|" + str(server) + "|" + url)
        url_hash = m.hexdigest()
        cache_file = os.path.join(self.addon_dir, "cache_" + url_hash + ".pickle")

        item_list = None
        total_records = 0
        baseline_name = None

        cache_thread = CacheManagerThread()
        cache_thread.gui_options = gui_options

        # Mark this cache file as in use for this session.
        home_window.set_property(cache_file, "true")

        # A one-shot "skip_cache_for_<url>" property forces a fresh load.
        # BUGFIX: clear the flag even when no cache file exists; previously
        # it lingered and silently skipped the cache on a later request.
        clear_cache = home_window.get_property("skip_cache_for_" + url)
        if clear_cache:
            home_window.clear_property("skip_cache_for_" + url)
            if os.path.isfile(cache_file):
                log.debug("Clearing cache data and loading new data")
                os.remove(cache_file)

        # Try to load the list item data from the cache.
        if os.path.isfile(cache_file) and use_cache:
            log.debug("Loading url data from cached pickle data")
            with FileLock(cache_file + ".locked", timeout=5):
                with open(cache_file, 'rb') as handle:
                    try:
                        cache_item = cPickle.load(handle)
                        cache_thread.cached_item = cache_item
                        item_list = cache_item.item_list
                        total_records = cache_item.total_records
                    except Exception as err:
                        # Corrupt/old pickle: fall through to a server load.
                        log.error("Pickle Data Load Failed : {0}", err)
                        item_list = None

        # We need to load the list item data from the server.
        if not item_list:
            log.debug("Loading url data from server")
            results = self.get_content(url)
            if results is None:
                results = []

            # Read the total before unwrapping; the "Items" payload replaces
            # the envelope dict below.
            if isinstance(results, dict):
                total_records = results.get("TotalRecordCount", 0)

            if isinstance(results, dict) and results.get("Items") is not None:
                baseline_name = results.get("BaselineItemName")
                results = results.get("Items", [])
            elif isinstance(results, list) and len(results) > 0 and results[0].get("Items") is not None:
                baseline_name = results[0].get("BaselineItemName")
                results = results[0].get("Items")

            item_list = []
            for item in results:
                item_data = extract_item_info(item, gui_options)
                item_data.baseline_itemname = baseline_name
                item_list.append(item_data)

            # Build the cache record the background thread will persist.
            cache_item = CacheItem()
            cache_item.item_list = item_list
            cache_item.file_path = cache_file
            cache_item.items_url = url
            cache_item.user_id = user_id
            cache_item.last_action = "fresh_data"
            cache_item.date_saved = time.time()
            cache_item.date_last_used = time.time()
            cache_item.total_records = total_records

            cache_thread.cached_item = cache_item

        if not use_cache:
            cache_thread = None

        return cache_file, item_list, total_records, cache_thread
2018-11-11 15:08:07 +11:00
class CacheManagerThread(threading.Thread):
    """Background persistence/revalidation of a CacheItem.

    Fresh data (downloaded within the last 20 seconds) is pickled straight
    to disk. Older data is re-downloaded, hashed and compared with the
    cached hash; on a mismatch the cache file is rewritten and the Kodi
    container is refreshed so the UI shows the new data.
    """

    # CacheItem to persist/revalidate; set by DataManager.get_items().
    cached_item = None
    # GUI options passed through to extract_item_info().
    gui_options = None

    def __init__(self, *args):
        threading.Thread.__init__(self, *args)

    @staticmethod
    def get_data_hash(items):
        """md5 over the item fields whose change should trigger a UI refresh."""
        m = hashlib.md5()
        for item in items:
            item_string = "%s_%s_%s_%s_%s_%s" % (
                item.name,
                item.play_count,
                item.favorite,
                item.resume_time,
                item.recursive_unplayed_items_count,
                item.etag
            )
            item_string = item_string.encode("UTF-8")
            m.update(item_string)
        return m.hexdigest()

    def run(self):
        log.debug("CacheManagerThread : Started")

        # Data saved within the last 20 seconds by a fresh download is
        # trusted as-is; anything else is re-fetched and hash-compared.
        is_fresh = False
        if (self.cached_item.date_saved is not None
                and (time.time() - self.cached_item.date_saved) < 20
                and self.cached_item.last_action == "fresh_data"):
            is_fresh = True

        if is_fresh and self.cached_item.item_list is not None and len(self.cached_item.item_list) > 0:
            log.debug("CacheManagerThread : Saving fresh data")
            cached_hash = self.get_data_hash(self.cached_item.item_list)
            self.cached_item.item_list_hash = cached_hash
            self.cached_item.last_action = "cached_data"
            self.cached_item.date_saved = time.time()
            self.cached_item.date_last_used = time.time()
            log.debug("CacheManagerThread : Saving New Data loops")
            with FileLock(self.cached_item.file_path + ".locked", timeout=5):
                with open(self.cached_item.file_path, 'wb') as handle:
                    cPickle.dump(self.cached_item, handle, protocol=cPickle.HIGHEST_PROTOCOL)
        else:
            log.debug("CacheManagerThread : Reloading to recheck data hashes")
            cached_hash = self.cached_item.item_list_hash
            log.debug("CacheManagerThread : Cache Hash : {0}", cached_hash)

            data_manager = DataManager()
            results = data_manager.get_content(self.cached_item.items_url)
            if results is None:
                results = []

            # BUGFIX: read TotalRecordCount from the envelope BEFORE
            # unwrapping "Items". The old code tested isinstance(results, dict)
            # after results had already been replaced by the item list, so
            # total_records was always 0 on revalidation.
            total_records = 0
            if isinstance(results, dict):
                total_records = results.get("TotalRecordCount", 0)

            if isinstance(results, dict) and results.get("Items") is not None:
                results = results.get("Items", [])
            elif isinstance(results, list) and len(results) > 0 and results[0].get("Items") is not None:
                results = results[0].get("Items")

            loaded_items = []
            for item in results:
                item_data = extract_item_info(item, self.gui_options)
                loaded_items.append(item_data)

            if loaded_items is None or len(loaded_items) == 0:
                log.debug("CacheManagerThread : loaded_items is None or Empty so not saving it")
                return

            loaded_hash = self.get_data_hash(loaded_items)
            log.debug("CacheManagerThread : Loaded Hash : {0}", loaded_hash)

            # If they dont match then save the data and trigger a content reload.
            if cached_hash != loaded_hash:
                log.debug("CacheManagerThread : Hashes different, saving new data and reloading container")
                self.cached_item.item_list = loaded_items
                self.cached_item.item_list_hash = loaded_hash
                self.cached_item.last_action = "fresh_data"
                self.cached_item.date_saved = time.time()
                self.cached_item.date_last_used = time.time()
                self.cached_item.total_records = total_records
                with FileLock(self.cached_item.file_path + ".locked", timeout=5):
                    with open(self.cached_item.file_path, 'wb') as handle:
                        cPickle.dump(self.cached_item, handle, protocol=cPickle.HIGHEST_PROTOCOL)
                log.debug("CacheManagerThread : Sending container refresh")
                xbmc.executebuiltin("Container.Refresh")
            else:
                # Hashes match: just bump the last-used timestamp so the
                # periodic cleanup keeps this cache file alive.
                self.cached_item.date_last_used = time.time()
                with FileLock(self.cached_item.file_path + ".locked", timeout=5):
                    with open(self.cached_item.file_path, 'wb') as handle:
                        cPickle.dump(self.cached_item, handle, protocol=cPickle.HIGHEST_PROTOCOL)
                log.debug("CacheManagerThread : Updating last used date for cache data")

        log.debug("CacheManagerThread : Exited")
2019-03-03 10:43:56 +11:00
def clear_cached_server_data():
    """Delete every cache_<hash>.pickle file in the addon profile dir.

    Shows a Kodi dialog reporting how many cache files were removed.
    """
    log.debug("clear_cached_server_data() called")
    addon_dir = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('profile'))
    dirs, files = xbmcvfs.listdir(addon_dir)
    del_count = 0
    for filename in files:
        if filename.startswith("cache_") and filename.endswith(".pickle"):
            # Typo fix in log message: "Deleteing" -> "Deleting".
            log.debug("Deleting CacheFile: {0}", filename)
            xbmcvfs.delete(os.path.join(addon_dir, filename))
            del_count += 1
    msg = string_load(30394) % del_count
    xbmcgui.Dialog().ok(string_load(30393), msg)
def clear_old_cache_data():
    """Delete cache pickles that are unreadable or unused for over a week.

    Each cache file records date_last_used; anything not touched within
    7 days (or that cannot be unpickled after several attempts) is removed
    under the same file lock used by readers and writers.
    """
    log.debug("clear_old_cache_data() : called")
    addon_dir = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo('profile'))
    dirs, files = xbmcvfs.listdir(addon_dir)

    def _delete_locked(path):
        # Take the shared file lock before deleting so we never race a writer.
        with FileLock(path + ".locked", timeout=5):
            xbmcvfs.delete(path)

    del_count = 0
    for filename in files:
        if not (filename.startswith("cache_") and filename.endswith(".pickle")):
            continue
        log.debug("clear_old_cache_data() : Checking CacheFile : {0}", filename)
        data_file = os.path.join(addon_dir, filename)

        # Retry the pickle load a few times; a writer may briefly hold the lock.
        cache_item = None
        for attempt in range(5):
            try:
                with FileLock(data_file + ".locked", timeout=5):
                    with open(data_file, 'rb') as handle:
                        cache_item = cPickle.load(handle)
                break
            except Exception as error:
                log.debug("clear_old_cache_data() : Pickle load error : {0}", error)
                cache_item = None
                if attempt < 4:
                    # Don't sleep after the final attempt - nothing left to retry.
                    xbmc.sleep(1000)

        if cache_item is not None:
            item_last_used = -1
            if cache_item.date_last_used is not None:
                item_last_used = time.time() - cache_item.date_last_used
            log.debug("clear_old_cache_data() : Cache item last used : {0} sec ago", item_last_used)
            if item_last_used == -1 or item_last_used > (3600 * 24 * 7):
                log.debug("clear_old_cache_data() : Deleting cache item age : {0}", item_last_used)
                _delete_locked(data_file)
                del_count += 1
        else:
            log.debug("clear_old_cache_data() : Deleting unloadable cache item")
            _delete_locked(data_file)
            # BUGFIX: count unloadable deletions too so the summary below
            # reports every file actually removed.
            del_count += 1

    log.debug("clear_old_cache_data() : Cache items deleted : {0}", del_count)