From 3beacfdace99b3c97a80c934f4732c0108f36a43 Mon Sep 17 00:00:00 2001 From: InsertDisc <31751462+InsertDisc@users.noreply.github.com> Date: Sun, 19 Jan 2025 14:40:31 -0500 Subject: [PATCH 1/5] Update pattrmm.py Methods added for 'In History' extension specifically to keep track of Trakt list ids. This allows emptying Trakt lists instead of removing and recreating after each run while also avoids using an 'expected' Trakt list slug result which was unreliable. --- pattrmm.py | 354 +++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 304 insertions(+), 50 deletions(-) diff --git a/pattrmm.py b/pattrmm.py index 80254e1..58e1524 100644 --- a/pattrmm.py +++ b/pattrmm.py @@ -82,6 +82,10 @@ def log_setup(): # preferences folder verify_or_create_folder("preferences", "preferences") +# trakt list ids +trakt_list_cache = "data/trakt_list_ids.yml" +verify_or_create_file(trakt_list_cache, "Trakt List IDs") + # settings file for pattrmm settings_file = "preferences/settings.yml" # If settings file doesn't exist, create it @@ -315,7 +319,7 @@ def settings(self): me = traktApi('me') slug = cleanPath(self.extension_library) self.slug = slug - trakt_list_meta = f"https://trakt.tv/users/{me}/lists/in-history-{slug}" + trakt_list_meta = f"https://trakt.tv/users/{me}/lists/<>" try: self.trakt_list_privacy = pref['libraries'][self.extension_library]['extensions']['in-history']['trakt_list_privacy'] except KeyError: @@ -399,6 +403,12 @@ def settings(self): self.maximum = maximum except KeyError: self.maximum = None + + try: + limit = pref['libraries'][self.extension_library]['extensions']['by_size']['limit'] + self.limit = limit + except KeyError: + self.limit = 500 try: self.save_folder = pref['libraries'][self.extension_library]['extensions']['by_size']['save_folder'] @@ -3090,6 +3100,251 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat print(f''' Extension setting found. 
Running 'In History' on {this_library} ''') + trakt_access = vars.traktApi('token') + trakt_api = vars.traktApi('client') + trakt_user_name = vars.traktApi('me') + + def trakt_list_state(trakt_list_id): + try: + url = f"https://api.trakt.tv/users/{trakt_user_name}/lists/{trakt_list_id}/items" + response = requests.get(url, headers={ + "Authorization": f"Bearer {trakt_access}", + "trakt-api-version": "2", + "trakt-api-key": trakt_api, + }) + time.sleep(1.25) + if response.status_code == 200: + data = response.json() + if not data: + return "empty" + return "valid" + elif response.status_code == 404: + return "not_found" + else: + raise Exception(f"Failed to fetch list items: {response.status_code} - {response.text}") + except Exception as e: + print(f"Trakt list state function error: {e}") + + def update_trakt_list_file(trakt_list_cache, trakt_list_cache_name, trakt_list_id, trakt_list_slug): + try: + print("Updating Trakt list cache data") + with open(trakt_list_cache, 'r') as trakt_cache_file: + data = yaml.load(trakt_cache_file) + if data is None: + data = {} + + if 'lists' not in data: + data['lists'] = {} + + + if this_library not in data['lists']: + data['lists'][this_library] = {} + + + if trakt_list_name not in data['lists'][this_library]: + data['lists'][this_library][trakt_list_cache_name] = {} + + + data['lists'][this_library][trakt_list_cache_name]['id'] = trakt_list_id + data['lists'][this_library][trakt_list_cache_name]['slug'] = trakt_list_slug + + + with open(trakt_list_cache, 'w') as trakt_cache_file: + yaml.dump(data, trakt_cache_file) + print("Trakt cache file updated.") + except Exception as e: + print(f"Exception: {e}") + + def validate_and_initialize(trakt_list_cache, trakt_list_cache_name, trakt_list_name, this_library): + try: + print("Validating trakt list links.") + with open(trakt_list_cache, 'r') as trakt_cache_file: + data = yaml.load(trakt_cache_file) + if data is None: + print("Initializing Trakt Cache file.") + data = {} + if 'lists' not in data: + data['lists'] = {} + print(f"Building: {data}") + + if this_library not in data['lists']: + data['lists'][this_library] = {} + print(f"Building: {data}") + + if trakt_list_cache_name not in data['lists'][this_library]: + data['lists'][this_library][trakt_list_cache_name] = {} + print(f"Building: {data}") + + create_status, trakt_list_id, trakt_list_slug = create_trakt_list(trakt_list_name, trakt_list_description, trakt_list_privacy, allow_comments=True) + if create_status == 201: + data['lists'][this_library][trakt_list_cache_name]['id'] = trakt_list_id + data['lists'][this_library][trakt_list_cache_name]['slug'] = trakt_list_slug + print(f"Building: {data}") + update_trakt_list_file(trakt_list_cache, trakt_list_cache_name, trakt_list_id, trakt_list_slug) + return True, trakt_list_id, trakt_list_slug + else: + print(f"<> ({create_status})") + return False, None, None + + if data is not None: + if 'lists' not in data: + data['lists'] = {} + print("Creating list structure") + else: + print(f"Structure ok [lists]") + + if this_library not in data['lists']: + data['lists'][this_library] = {} + print(f"Adding {this_library} to [lists]") + else: + print(f"Structure ok [lists][{this_library}]") + + if trakt_list_cache_name not in data['lists'][this_library]: + data['lists'][this_library][trakt_list_cache_name] = {} + print(f"Adding {trakt_list_cache_name} to [lists][{this_library}]") + else: + print(f"Structure ok [lists][{this_library}][{trakt_list_cache_name}]") + + print("Checking for existing Trakt list id") + 
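+                # Look up any previously cached Trakt list id/slug for this library and list key.
+                # The chained .get() calls below return None when any level of the cache is
+                # missing, instead of raising a KeyError.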
trakt_list_id = ( + data.get('lists', {}) + .get(this_library, {}) + .get(trakt_list_cache_name, {}) + .get('id', None) + ) + trakt_list_slug = ( + data.get('lists', {}) + .get(this_library, {}) + .get(trakt_list_cache_name, {}) + .get('slug', None) + ) + + if trakt_list_id is None: + print(f"No Trakt list id found for [lists][{this_library}][{trakt_list_cache_name}]") + print(f"Creating {trakt_list_name} list on Trakt") + create_status, trakt_list_id, trakt_list_slug = create_trakt_list(trakt_list_name, trakt_list_description, trakt_list_privacy, allow_comments=True) + if create_status == 201: + print(f"List ({trakt_list_name}) created <>") + data['lists'][this_library][trakt_list_cache_name]['id'] = trakt_list_id + data['lists'][this_library][trakt_list_cache_name]['slug'] = trakt_list_slug + update_trakt_list_file(trakt_list_cache, trakt_list_cache_name, trakt_list_id, trakt_list_slug) + return True, trakt_list_id, trakt_list_slug + else: + print(f"Fail <> ({create_status})") + return False, None, None + + elif trakt_list_id is not None: + print("ID found") + print("Validating Trakt List") + validate_trakt_list = trakt_list_state(trakt_list_id) + if validate_trakt_list == "not_found": + print(f"Trakt list {trakt_list_id} could not be found.") + print("Creating a new list and updating cache information.") + create_status, trakt_list_id, trakt_list_slug = create_trakt_list(trakt_list_name, trakt_list_description, trakt_list_privacy, allow_comments=True) + if create_status == 201: + print(f"List ({trakt_list_name}) created <>") + update_trakt_list_file(trakt_list_cache, trakt_list_cache_name, trakt_list_id, trakt_list_slug) + return True, trakt_list_id, trakt_list_slug + else: + print(f"<> ({create_status})") + return False, None, None + + elif validate_trakt_list == "empty": + print("Trakt list info valid. List exists but is empty") + return "EMPTY", trakt_list_id, trakt_list_slug + + elif validate_trakt_list == "valid": + print("Trakt list is valid and populated") + return True, trakt_list_id, trakt_list_slug + except Exception as e: + print(f"Failed to validate. 
Error: {e}") + + + + def remove_trakt_list_items(list_id, items): + remove_url = f"https://api.trakt.tv/users/{trakt_user_name}/lists/{list_id}/items/remove" + print(f"Removing items from trakt list ({list_id})") + payload = { + f"{plex.library.type(this_library)}s": [] + } + if not items: + print("The list of items is empty!") + return + for item in items: + if item['type'] == plex.library.type(this_library): + payload[f"{plex.library.type(this_library)}s"].append({ + "ids": item[f"{plex.library.type(this_library)}"]['ids'] + }) + if payload[f"{plex.library.type(this_library)}s"]: + response = requests.post(remove_url, json=payload, headers={ + "Authorization": f"Bearer {trakt_access}", + "trakt-api-version": "2", + "trakt-api-key": f"{trakt_api}", + }) + if response.status_code == 200: + print(f"Successfully removed the {plex.library.type(this_library)}s from list {list_id}.") + else: + raise Exception(f"Failed to remove items from list {list_id}: {response.status_code} - {response.text}") + else: + print("No data to remove.") + + def add_trakt_list_items(trakt_list_id, trakt_list_items): + url = f"https://api.trakt.tv/users/{trakt_user_name}/lists/{trakt_list_id}/items" + response = requests.post(url, headers={ + "Content-Type": "application/json", + "Authorization": f"Bearer {trakt_access}", + "trakt_api_version": "2", + "trakt-api-key": trakt_api, + }, + data=trakt_list_items) + time.sleep(1.25) + if response.status_code == 201: + print("Items successfully posted") + return response + + def get_trakt_list_items(list_id): + url = f"https://api.trakt.tv/users/{trakt_user_name}/lists/{list_id}/items" + response = requests.get(url, headers={ + "Authorization": f"Bearer {trakt_access}", + "trakt-api-version": "2", + "trakt-api-key": trakt_api, + }) + + if response.status_code == 200: + print("Trakt list items fetched.") + list_items = response.json() + return list_items + else: + raise Exception(f"Failed to fetch list items: {response.status_code} - {response.text}") + + def create_trakt_list(trakt_list_name, trakt_list_description, trakt_list_privacy, allow_comments=True): + api_url = f"https://api.trakt.tv/users/{trakt_user_name}/lists" + + headers = { + "Content-Type": "application/json", + "Authorization": f"Bearer {trakt_access}", + "trakt-api-version": "2", + "trakt-api-key": trakt_api, + } + + payload = { + "name": trakt_list_name, + "description": trakt_list_description, + "privacy": trakt_list_privacy, + "allow_comments": allow_comments, + } + + response = requests.post(api_url, json=payload, headers=headers) + create_status = response.status_code + if response.status_code == 201: # Created + time.sleep(1.25) + data = response.json() + trakt_list_id = data['ids']['trakt'] + trakt_list_slug = data['ids']['slug'] + return create_status, trakt_list_id, trakt_list_slug + else: + raise Exception(f"Failed to create list: {response.status_code} - {response.text}") + logging.info(f"Extension setting found. 
Running 'In History' on {this_library}") in_history_settings = vars.Extensions(this_library).in_history.settings() pmm_in_history_folder = pmm_config_path_prefix + in_history_settings.save_folder @@ -3109,7 +3364,6 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat print(f"Exception: {str(sf)}") logging.warning(f"Exception: {str(sf)}") in_history_range = in_history_settings.range - trakt_user_name = vars.traktApi('me') library_clean_path = vars.cleanPath(in_history_settings.slug) collection_title = in_history_settings.collection_title in_history_meta = in_history_settings.meta @@ -3137,14 +3391,14 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat create_in_history_file.close() print(f"File created") logging.info(f"File created") - in_history_file_location = f"config/{in_history_settings.save_folder}{library_clean_path}-in-history.yml" + in_history_file_location = f"{in_history_file}" print(f"{in_history_file_location}") logging.info(f"{in_history_file_location}") except Exception as e: print(f"An error occurred: {e}") else: print(f"Updating {this_library} 'In History' collection file..") - logging.info(f"Updating {this_library} 'In History' collectioin file..") + logging.info(f"Updating {this_library} 'In History' collection file..") in_history_file_location = f"config/{in_history_settings.save_folder}{library_clean_path}-in-history.yml" print(f"{in_history_file_location}") logging.info(f"{in_history_file_location}") @@ -3154,19 +3408,19 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat for key, value in loaded_in_history_yaml['collections'].items(): if key != collection_title: - print(f'''Collection for {this_library} has been changed from {key} ==> {collection_title} + print(f'''Collection name for {this_library} has been changed from {key} ==> {collection_title} Attempting to remove unused collection.''') - logging.info(f'''Collection for {this_library} has been changed from {key} ==> {collection_title} + logging.info(f'''Collection name for {this_library} has been changed from {key} ==> {collection_title} Attempting to remove unused collection.''') library_id = vars.plexGet(this_library) old_collection_id = plex.collection.id(key, library_id) delete_old_collection = plex.collection.delete(old_collection_id) if delete_old_collection: - print(f"Successfully removed old '{key}' collection.") - logging.info(f"Successfully removed old '{key}' collection.") + print(f"Successfully removed old '{key}' collection from Plex.") + logging.info(f"Successfully removed old '{key}' collection from Plex.") else: - print(f"Could not remove deprecated '{key}' collection.") - logging.warning(f"Could not remove deprecated '{key}' collection.") + print(f"Could not remove deprecated '{key}' collection from Plex.") + logging.warning(f"Could not remove deprecated '{key}' collection from Plex.") with open(in_history_file, "w") as write_in_history_file: write_in_history_file.write(in_history_meta_str) @@ -3181,7 +3435,6 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat "July", "August", "September", "October", "November", "December" ] - if in_history_range == 'day': today = datetime.now() start_date = today @@ -3213,38 +3466,22 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat if description_identifier == 'movie': description_type = 'Movies' trakt_type = 'movies' - trakt_access = vars.traktApi('token') - trakt_api = vars.traktApi('client') - 
trakt_headers = { - 'Content-Type': 'application/json', - 'Authorization': 'Bearer ' + trakt_access + '', - 'trakt-api-version': '2', - 'trakt-api-key': '' + trakt_api + '' - } - trakt_list_url = f"https://api.trakt.tv/users/{trakt_user_name}/lists" - trakt_list_url_post = f"https://api.trakt.tv/users/{trakt_user_name}/lists/in-history-{library_clean_path}" - trakt_list_url_post_items = f"https://api.trakt.tv/users/{trakt_user_name}/lists/in-history-{library_clean_path}/items" - trakt_list_data = f''' -{{ - "name": "In History {this_library}", - "description": "{description_type} released this {in_history_range} in history.", - "privacy": "{in_history_settings.trakt_list_privacy}", - "display_numbers": true, - "allow_comments": true, - "sort_by": "rank", - "sort_how": "asc" -}} - ''' - print("Clearing " + this_library + " trakt list...") - logging.info("Clearing " + this_library + " trakt list...") - trakt_delete_list = requests.delete(trakt_list_url_post, headers=trakt_headers) - if trakt_delete_list.status_code == 201 or 200 or 204: - print("List cleared") - time.sleep(1.25) - trakt_make_list = requests.post(trakt_list_url, headers=trakt_headers, data=trakt_list_data) - if trakt_make_list.status_code == 201 or 200 or 204: - print("Initialization successful.") - time.sleep(1.25) + + # Prepare the list name + trakt_list_cache_name = f"{in_history_range}_in_history" + trakt_list_name=f"In History {this_library} - {in_history_range}" + trakt_list_description=f"{description_type} released this {in_history_range} in history." + trakt_list_privacy=f"{in_history_settings.trakt_list_privacy}" + + valid, trakt_list_id, trakt_list_slug = validate_and_initialize(trakt_list_cache, trakt_list_cache_name, trakt_list_name, this_library) + + if valid is True: + if trakt_list_state(trakt_list_id) == "valid": + print("List is validated") + trakt_list_items = get_trakt_list_items(trakt_list_id) + remove_trakt_list_items(trakt_list_id, trakt_list_items) + time.sleep(1.5) + trakt_list_items = ''' {''' trakt_list_items += f''' @@ -3308,7 +3545,19 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat } ''' - post_items = requests.post(trakt_list_url_post_items, headers=trakt_headers, data=trakt_list_items) + post_items = add_trakt_list_items(trakt_list_id, trakt_list_items) + + try: + print("Updating collection file data...") + with open(in_history_file, 'r') as in_history_collection_file: + current_collection_data = yaml.load(in_history_collection_file) + current_collection_data['collections'][collection_title]['trakt_list'] = f"https://trakt.tv/users/{trakt_user_name}/lists/{trakt_list_id}" + with open(in_history_file, 'w') as in_history_collection_file: + yaml.dump(current_collection_data, in_history_collection_file) + print("Collection file url synced") + except Exception as e: + print(f"Error updating collection file url: {e}") + if post_items.status_code == 201: print(f''' Successfully posted This {in_history_range} In History items for {this_library}''') @@ -3386,19 +3635,19 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat for key, value in check_BySize_Title['collections'].items(): if key != collection_title: - print(f'''Collection for {this_library} has been changed from {key} ==> {collection_title} + print(f'''Collection name for {this_library} has been changed from {key} ==> {collection_title} Attempting to remove unused collection.''') - logging.info(f'''Collection for {this_library} has been changed from {key} ==> {collection_title} + 
logging.info(f'''Collection name for {this_library} has been changed from {key} ==> {collection_title} Attempting to remove unused collection.''') library_id = vars.plexGet(this_library) old_collection_id = plex.collection.id(key, library_id) delete_old_collection = plex.collection.delete(old_collection_id) if delete_old_collection == True: - print(f"Successfully removed old '{key}' collection.") - logging.info(f"Successfully removed old '{key}' collection.") + print(f"Successfully removed old '{key}' collection from Plex.") + logging.info(f"Successfully removed old '{key}' collection from Plex.") if delete_old_collection == False: - print(f"Could not remove deprecated '{key}' collection.") - logging.warning(f"Could not remove deprecated '{key}' collection.") + print(f"Could not remove deprecated '{key}' collection from Plex.") + logging.warning(f"Could not remove deprecated '{key}' collection from Plex.") with open(by_size_file, "w") as write_by_size_file: write_by_size_file.write(by_size_meta_str) @@ -3412,6 +3661,7 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat reverse_value = by_size_settings.reverse minimum = by_size_settings.minimum maximum = by_size_settings.maximum + list_limit = by_size_settings.limit movies_list = sorted(movies_list, key=lambda x: getattr(x, sort_key), reverse=reverse_value) movies_list = [ movie for movie in movies_list @@ -3420,8 +3670,12 @@ def __init__(self, id, title, first_air_date, last_air_date, next_air_date, stat (maximum is None or movie.size <= maximum) ) ] + movies_list = movies_list[:list_limit] print(f'''Sorting {this_library} by '{by_size_settings.order_by_field}.{by_size_settings.order_by_direction}'.''') + print(f"Minimum size: {minimum}") + print(f"Maximum size: {maximum}") + print(f"Limit: {list_limit}") library_clean_path = vars.cleanPath(this_library) trakt_user_name = vars.traktApi('me') From e337202b5fe56104667702ccc22827ae2eb4f7ad Mon Sep 17 00:00:00 2001 From: InsertDisc <31751462+InsertDisc@users.noreply.github.com> Date: Sun, 19 Jan 2025 14:51:39 -0500 Subject: [PATCH 2/5] Remove vars.py creation from pattrmm.py --- pattrmm.py | 1412 +--------------------------------------------------- 1 file changed, 1 insertion(+), 1411 deletions(-) diff --git a/pattrmm.py b/pattrmm.py index 58e1524..b895e9b 100644 --- a/pattrmm.py +++ b/pattrmm.py @@ -203,1420 +203,10 @@ def log_setup(): # Check for vars file and create if not present vars_file_exists = os.path.exists(vars_file) if not vars_file_exists: - print("Creating vars module file..") - create_vars_file = open(vars_file, "x") - create_vars_file.write(""" -#vars:nightly -from ruamel.yaml import YAML -yaml = YAML() -yaml.preserve_quotes = True -import xml.etree.ElementTree as ET -import requests -import json -import re -import datetime -import tzlocal -today = datetime.datetime.today() -import os -import sys -library = "" - -is_docker = os.environ.get('PATTRMM_DOCKER', "False") - -if is_docker == "True": - configPathPrefix = "./config/" - - -if is_docker == "False": - configPathPrefix = "../" - -import logging -log_file = "data/logs/pattrmm.log" -logging.basicConfig(filename=log_file, level=logging.INFO, format='%(asctime)s - %(levelname)s: %(message)s', datefmt="%Y-%m-%d %H:%M:%S") - - -config_path = configPathPrefix + 'config.yml' -settings_path = 'preferences/settings.yml' - -def get_os(): - if sys.platform.startswith('linux'): - return 'Linux' - elif sys.platform.startswith('darwin'): - return 'macOS' - elif sys.platform.startswith('win'): - return 
'Windows' - else: - return 'Unknown' - -def date_within_range(item_date, start_date, end_date): - if (start_date.month, start_date.day) <= (end_date.month, end_date.day): - return ( - (start_date.month, start_date.day) <= - (item_date.month, item_date.day) <= - (end_date.month, end_date.day) - ) + print("VARS module missing. Please download the needed vars module from the the 'Nightly' Repo.") else: - return ( - (item_date.month, item_date.day) >= - (start_date.month, start_date.day) - or - (item_date.month, item_date.day) <= - (end_date.month, end_date.day) - ) - -class LibraryList: - def __init__(self, title, date, ratingKey): - self.title = title - self.date = datetime.datetime.strptime(date, '%Y-%m-%d').date() - self.ratingKey = ratingKey - -class ExtendedLibraryList: - def __init__(self, ratingKey, title, added, released, size): - self.ratingKey = ratingKey - self.title = title - self.added = added - self.released = released - self.size = size - -class itemBase: - def __init__(self, title, date, details): - self.title = re.sub(r"\s\(.*?\)","", title) - self.date = datetime.datetime.strptime(date, '%Y-%m-%d').date() - self.details = details - - -class itemDetails: - def __init__(self, ratingKey, imdb, tmdb, tvdb): - self.ratingKey = ratingKey - self.imdb = imdb - self.tmdb = tmdb - self.tvdb = tvdb - -class Extensions: - def __init__(self, extension_library): - self.extension_library = extension_library - - @property - def in_history(self): - self.context = 'in_history' - return self - - @property - def by_size(self): - self.context = 'by_size' - return self - - @property - def missing_episodes(self): - self.context = 'missing_episodes' - return self - - def settings(self): - if self.context == 'in_history': - settings = settings_path - with open(settings) as sf: - pref = yaml.load(sf) - me = traktApi('me') - slug = cleanPath(self.extension_library) - self.slug = slug - trakt_list_meta = f"https://trakt.tv/users/{me}/lists/<>" - try: - self.trakt_list_privacy = pref['libraries'][self.extension_library]['extensions']['in-history']['trakt_list_privacy'] - except KeyError: - self.trakt_list_privacy = 'private' - try: - range = pref['libraries'][self.extension_library]['extensions']['in-history']['range'] - range_lower = range.lower() - self.range = range_lower - except KeyError: - self.range = 'day' - try: - self.save_folder = pref['libraries'][self.extension_library]['extensions']['in-history']['save_folder'] - except KeyError: - self.save_folder = '' - try: - self.collection_title = pref['libraries'][self.extension_library]['extensions']['in-history']['collection_title'] - except KeyError: - self.collection_title = 'This {{range}} in history' - if "{{range}}" in self.collection_title: - self.collection_title = self.collection_title.replace("{{range}}", self.range) - if "{{Range}}" in self.collection_title: - self.collection_title = self.collection_title.replace("{{Range}}", self.range.capitalize()) - try: - self.starting = pref['libraries'][self.extension_library]['extensions']['in-history']['starting'] - except KeyError: - self.starting = 0 - try: - self.ending = pref['libraries'][self.extension_library]['extensions']['in-history']['ending'] - except KeyError: - self.ending = today.year - try: - self.increment = pref['libraries'][self.extension_library]['extensions']['in-history']['increment'] - except KeyError: - self.increment = 1 - try: - try: - options = { - key: value - for key, value in pref['libraries'][self.extension_library]['extensions']['in-history']['meta'].items() - } - if 
"sort_title" in options: - options['sort_title'] = '"' + options['sort_title'] + '"' - except KeyError: - options = {} - poster_url = f'"https://raw.githubusercontent.com/meisnate12/Plex-Meta-Manager-Images/master/chart/This%20{self.range.capitalize()}%20in%20History.jpg"' - self.meta = {} - self.meta['collections'] = {} - self.meta['collections'][self.collection_title] = {} - self.meta['collections'][self.collection_title]['trakt_list'] = trakt_list_meta - self.meta['collections'][self.collection_title]['visible_home'] = 'true' - self.meta['collections'][self.collection_title]['visible_shared'] = 'true' - self.meta['collections'][self.collection_title]['collection_order'] = 'custom' - self.meta['collections'][self.collection_title]['sync_mode'] = 'sync' - self.meta['collections'][self.collection_title]['url_poster'] = poster_url - self.meta['collections'][self.collection_title].update(options) - - except Exception as e: - return f"Error: {str(e)}" - return self - - if self.context == 'by_size': - settings = settings_path - with open(settings) as sf: - pref = yaml.load(sf) - me = traktApi('me') - slug = cleanPath(self.extension_library) - self.slug = slug - trakt_list_meta = f"https://trakt.tv/users/{me}/lists/sorted-by-size-{slug}" - try: - self.trakt_list_privacy = pref['libraries'][self.extension_library]['extensions']['by_size']['trakt_list_privacy'] - except KeyError: - self.trakt_list_privacy = 'private' - try: - minimum = pref['libraries'][self.extension_library]['extensions']['by_size']['minimum'] - self.minimum = minimum - except KeyError: - self.minimum = 0 - - try: - maximum = pref['libraries'][self.extension_library]['extensions']['by_size']['maximum'] - self.maximum = maximum - except KeyError: - self.maximum = None - - try: - limit = pref['libraries'][self.extension_library]['extensions']['by_size']['limit'] - self.limit = limit - except KeyError: - self.limit = 500 - - try: - self.save_folder = pref['libraries'][self.extension_library]['extensions']['by_size']['save_folder'] - except KeyError: - self.save_folder = '' - try: - self.collection_title = pref['libraries'][self.extension_library]['extensions']['by_size']['collection_title'] - except KeyError: - self.collection_title = 'Sorted by size' - try: - default_order_by = 'size.desc' - order_by = pref['libraries'][self.extension_library]['extensions']['by_size']['order_by'] - possible_filters = ('size.desc', 'size.asc', 'title.desc', 'title.asc', 'added.asc', 'added.desc', 'released.desc', 'released.asc') - possible_fields = ('size', 'title', 'added', 'released') - if order_by in possible_filters: - self.order_by = order_by - if order_by not in possible_filters: - if order_by in possible_fields: - invalid_order_by = order_by - if order_by == 'title': - order_by = order_by + '.asc' - else: - order_by = order_by + '.desc' - print(f'''Invalid order by setting "{invalid_order_by}". - Order by field '{invalid_order_by}' found. Using '{order_by}'.''') - logging.warning(f'''Invalid order by setting "{order_by}", falling back to default {default_order_by}''') - if order_by not in possible_fields: - print(f'''{order_by} is not a valid option. Using default.''') - self.order_by = default_order_by - except KeyError: - print(f'''No list order setting found. Using default '{default_order_by}'.''') - logging.info(f'''No list order setting found. 
Using default '{default_order_by}'.''') - self.order_by = default_order_by - - self.order_by_field, self.order_by_direction = self.order_by.split('.') - if self.order_by_direction == 'desc': - self.reverse = True - if self.order_by_direction == 'asc': - self.reverse = False - - try: - try: - options = { - key: value - for key, value in pref['libraries'][self.extension_library]['extensions']['by_size']['meta'].items() - } - if "sort_title" in options: - options['sort_title'] = '"' + options['sort_title'] + '"' - except KeyError: - options = {} - self.meta = {} - self.meta['collections'] = {} - self.meta['collections'][self.collection_title] = {} - self.meta['collections'][self.collection_title]['trakt_list'] = trakt_list_meta - self.meta['collections'][self.collection_title]['visible_home'] = 'true' - self.meta['collections'][self.collection_title]['visible_shared'] = 'true' - self.meta['collections'][self.collection_title]['collection_order'] = 'custom' - self.meta['collections'][self.collection_title]['sync_mode'] = 'sync' - self.meta['collections'][self.collection_title].update(options) - - except Exception as e: - return f"Error: {str(e)}" - return self - - if self.context == 'missing_episodes': - settings = settings_path - print(settings_path) - with open(settings) as sf: - pref = yaml.load(sf) - try: - self.overlay_save_folder = pref['libraries'][self.extension_library]['extensions']['missing_episodes']['overlay_save_folder'] - except KeyError: - self.overlay_save_folder = 'overlays/' - try: - self.monitored_only = pref['libraries'][self.extension_library]['extensions']['missing_episodes']['monitored_only'] - except KeyError: - self.monitored_only = False - try: - self.style = pref['libraries'][self.extension_library]['extensions']['missing_episodes']['style'] - except KeyError: - self.style = 'dot' - - if self.style == 'icon': - self.display_style_present = f''' - template: {{name: Missing_Episodes, this_overlay_name: all-episodes-present, back_height: 30, back_width: 30, back_color: "#FFFFFF", back_line_width: 10, back_line_color: "#FFFFFF", back_radius: 50, horizontal_offset: 30, vertical_offset: 30}} - ''' - self.display_style_missing = f''' - template: {{name: Missing_Episodes, this_overlay_name: not-all-episodes-present, back_height: 30, back_width: 30, back_color: "#FFFFFF", back_line_width: 10, back_line_color: "#FFFFFF", back_radius: 50, horizontal_offset: 30, vertical_offset: 30}} - ''' - if self.style == 'dot': - self.display_style_present = f''' - template: {{name: Missing_Episodes, back_height: 30, back_width: 30, back_color: "#FFFFFF", back_line_width: 10, back_line_color: "#FFFFFF", back_radius: 50, horizontal_offset: 30, vertical_offset: 30}} - ''' - self.display_style_missing = f''' - template: {{name: Missing_Episodes, back_height: 30, back_width: 30, back_color: "#FFFFFF00", back_line_width: 10, back_line_color: "#FFFFFF", back_radius: 50, horizontal_offset: 30, vertical_offset: 30}} - ''' - return self - - - - -class Plex: - def __init__(self, plex_url, plex_token, tmdb_api_key): - self.plex_url = plex_url - self.plex_token = plex_token - self.tmdb_api_key = tmdb_api_key - self.context = None - - @property - def library(self): - self.context = 'library' - return self # Return self to allow method chaining - - @property - def collection(self): - self.context = 'collection' - return self # Return self to allow method chaining - - @property - def item(self): - self.context = 'item' - return self # Return self to allow method chaining - - @property - def show(self): - 
self.context = 'show' - return self # Return self to allow method chaining - - @property - def shows(self): - self.context = 'shows' - return self # Return self to allow method chaining - - @property - def movie(self): - self.context = 'movie' - return self # Return self to allow method chaining - - @property - def movies(self): - self.context = 'movies' - return self # Return self to allow method chaining - - - def type(self, library): - library_details_url = f"{self.plex_url}/library/sections" - library_details_url = re.sub("0//", "0/", library_details_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = requests.get(library_details_url, headers=headers) - data = response.json() - for section in data['MediaContainer']['Directory']: - if section["title"] == library: - library_type = section["type"] - - return library_type - - - - def info(self, ratingKey): - - if self.context == 'item': - movie_details_url = f"{self.plex_url}/library/metadata/{ratingKey}" - movie_details_url = re.sub("0//", "0/", movie_details_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = requests.get(movie_details_url, headers=headers) - if response.status_code == 200: - imdbID = "Null" - tmdbID = "Null" - tvdbID = "Null" - - data = response.json() - extendedDetails = response.json() - try: - data = data['MediaContainer']['Metadata'] - for item in data: - title = item.get('title') - if item.get('originallyAvailableAt'): - date = item.get('originallyAvailableAt') - else: - date = "Null" - key = item.get('ratingKey') - except: - None - try: - dataDetails = extendedDetails['MediaContainer']['Metadata'][0]['Guid'] - for guid_item in dataDetails: - guid_id = guid_item.get('id') - if guid_id.startswith("tmdb://"): - tmdbID = guid_item.get('id')[7:] - if guid_id.startswith("imdb://"): - imdbID = guid_item.get('id')[7:] - if guid_id.startswith("tvdb://"): - tvdbID = guid_item.get('id')[7:] - except KeyError: - return itemBase(title=title, date=date, details=itemDetails(key, imdbID, tmdbID, tvdbID)) - return itemBase(title=title, date=date, details=itemDetails(key, imdbID, tmdbID, tvdbID)) - - - def list(self, library): - try: - # Replace with the correct section ID and library URL - section_id = plexGet(library) # Replace with the correct section ID - library_url = f"{self.plex_url}/library/sections/{section_id}/all" - library_url = re.sub("0//", "0/", library_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = requests.get(library_url, headers=headers) - library_list = [] - - if response.status_code == 200: - data = response.json() - for item in data['MediaContainer']['Metadata']: - try: - check_if_has_date = item['originallyAvailableAt'] - - library_list.append(LibraryList(title=item['title'],ratingKey=item['ratingKey'], date=item['originallyAvailableAt'])) - except KeyError: - print(f"{item['title']} has no 'Originally Available At' date. 
Ommitting title.") - continue - return library_list - else: - return f"Error: {response.status_code} - {response.text}" - except Exception as e: - return f"Error: {str(e)}" - - def extended_list(self, library): - try: - # Replace with the correct section ID and library URL - section_id = plexGet(library) # Replace with the correct section ID - library_url = f"{self.plex_url}/library/sections/{section_id}/all" - library_url = re.sub("0//", "0/", library_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = requests.get(library_url, headers=headers) - extended_library_list = [] - - if response.status_code == 200: - data = response.json() - for item in data['MediaContainer']['Metadata']: - try: - title = item['title'] - ratingKey = item['ratingKey'] - released = item['originallyAvailableAt'] - added_at_str = item['addedAt'] - added_at_timestamp = abs(int(added_at_str)) - added_dt_object = datetime.datetime.utcfromtimestamp(added_at_timestamp) - added_at = added_dt_object.strftime('%Y-%m-%d') - size_str = item['Media'][0]['Part'][0]['size'] - size_bytes = int(size_str) - file_size_gb = size_bytes / 1073741824 - extended_library_list.append(ExtendedLibraryList(**{ - 'ratingKey': ratingKey, - 'title': title, - 'added': added_at, - 'released': released, - 'size': file_size_gb - })) - except KeyError: - print(f"{item['title']} has no 'Originally Available At' date. Ommitting title.") - continue - return extended_library_list - else: - return f"Error: {response.status_code} - {response.text}" - except Exception as e: - return f"Error: {str(e)}" - - def id(self, name, library_id=None): - if self.context == 'show': - try: - # Replace with the correct section ID and library URL - section_id = plexGet(library) # Replace with the correct section ID - library_url = f"{self.plex_url}/library/sections/{section_id}/all" - library_url = re.sub("0//", "0/", library_url) - headers = {"X-Plex-Token": self.plex_token} - response = requests.get(library_url, headers=headers) - - if response.status_code == 200: - data = response.json() - for item in data['MediaContainer']['Metadata']: - if item['type'] == 'show' and item['title'] == name: - return f"ID for show '{name}': {item['ratingKey']}" - return f"Show '{name}' not found" - else: - return f"Error: {response.status_code} - {response.text}" - except Exception as e: - return f"Error: {str(e)}" - - if self.context == 'movie': - try: - num = 1 + 1 # get movie id here - - except Exception as e: - return f"Error: {str(e)}" - - if self.context == 'collection': - try: - section_id = library_id - collection_name = name - collection_url = f"{self.plex_url}/library/sections/{section_id}/collections" - collection_url = re.sub("0//", "0/", collection_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = requests.get(collection_url, headers=headers) - if response.status_code == 200: - collections_data = response.json() - for collection in collections_data['MediaContainer']['Metadata']: - if collection['title'] == collection_name: - collection_id = collection['ratingKey'] - return collection_id - except Exception as e: - return f"Error: {str(e)}" - - def delete(self, key): - if self.context == 'collection': - try: - collection_id = key - collection_delete_url = f"{self.plex_url}/library/collections/{collection_id}" - collection_delete_url = re.sub("0//", "0/", collection_delete_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = 
requests.delete(collection_delete_url, headers=headers) - if response.status_code == 200: - return True - elif response.status_code != 200: - return False - except Exception as e: - return f"Error: {str(e)}" - - def tmdb_id(self, rating_key): - # Attempt to retrieve TMDB ID from Plex - plex_tmdb_id = self.get_tmdb_id_from_plex(rating_key) - - if plex_tmdb_id is not None: - return plex_tmdb_id - - # If not found in Plex, search TMDB - if plex_tmdb_id == None: - show_name = self.get_show_name(rating_key) - year = self.year(rating_key) - if year != None: - print("") - print("No TMDB ID found locally: Searching for " + show_name + " with year " + str(year)) - logging.info("No TMDB ID found locally: Searching for " + show_name + " with year " + str(year)) - search = self.search_tmdb_id(show_name, year) - if search == None: - year = int(year) - year += 1 - print("No results, searching again with year " + str(year)) - logging.info("No results, searching again with year " + str(year)) - search = self.search_tmdb_id(show_name, str(year)) - if search == None: - year -= 2 - print("No results, searching again with year " + str(year)) - logging.info("No results, searching again with year " + str(year)) - search = self.search_tmdb_id(show_name, str(year)) - if search == None: - print(show_name + " could not be matched.") - logging.info(show_name + " could not be matched.") - search = "null" - - return search - - if year == None: - print("") - print("No originally availabe year for " + show_name + ", cannot search for title reliably.") - logging.warning("No originally availabe year for " + show_name + ", cannot search for title reliably.") - search = "null" - return search - - - - def get_tmdb_id_from_plex(self, rating_key): - try: - show_details_url = f"{self.plex_url}/library/metadata/{rating_key}" - show_details_url = re.sub("0//", "0/", show_details_url) - headers = {"X-Plex-Token": self.plex_token} - response = requests.get(show_details_url, headers=headers) - if response.status_code == 200: - root = ET.fromstring(response.text) - guid_elements = root.findall('.//Guid') - for guid_element in guid_elements: - if guid_element.get('id', '').startswith('tmdb://'): - tmdb_id = guid_element.get('id')[7:] - #tmdb_id = guid.split('tmdb://')[1] - return tmdb_id - return None - else: - return f"Error: {response.status_code} - {response.text}" - except Exception as e: - return f"Error: {str(e)}" - - - def get_show_name(self, rating_key): - try: - show_details_url = f"{self.plex_url}/library/metadata/{rating_key}" - show_details_url = re.sub("0//", "0/", show_details_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json" - } - - # Make a request to get show details - response = requests.get(show_details_url, headers=headers) - - if response.status_code == 200: - data = json.loads(json.dumps(response.json())) - values = data['MediaContainer']['Metadata'] - for result in values: - title = result['title'] - title = re.sub(r"\s\(.*?\)","", title) - return title - else: - return f"Error: {response.status_code} - {response.text}" - except Exception as e: - return f"Error: {str(e)}" - - def retry_search_with_adjusted_years(self, title, year): - for i in range(2): - if i == 0: - year += 1 - elif i == 1: - year -= 2 - - tmdb_search_result = self.search_tmdb_id(title, year) - if tmdb_search_result is not None: - return tmdb_search_result - - return "null" - - - def year(self, rating_key): - try: - # Get the originally available year from Plex - show_details_url = 
f"{self.plex_url}/library/metadata/{rating_key}" - show_details_url = re.sub("0//", "0/", show_details_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - - response = requests.get(show_details_url, headers=headers) - - if response.status_code == 200: - data = json.loads(json.dumps(response.json())) - for result in data['MediaContainer']['Metadata']: - try: - year = result['originallyAvailableAt'][:4] - except KeyError: - year = None - return year - else: - return None - except Exception as e: - return None - - def search_tmdb_id(self, title, year): - try: - # Query TMDB to search for a show based on title and year - tmdb_api_url = "https://api.themoviedb.org/3/search/tv" - tmdb_api_key = self.tmdb_api_key - tmdb_headers = { - 'accept': 'application/json' - } - tmdb_params = { - "api_key": tmdb_api_key, - "query": title, - "first_air_date_year": year - } - tmdb_response = requests.get(tmdb_api_url, headers=tmdb_headers, params=tmdb_params) - - if tmdb_response.status_code == 200: - tmdb_data = json.loads(json.dumps(tmdb_response.json())) - if tmdb_data['total_results'] > 0: - for item in tmdb_data['results']: - if item['first_air_date'][:4] == year: - id = item['id'] - - return id - if tmdb_data['total_results'] == 0: - return None - - except Exception as e: - return e - - - def episodes(self, rating_key): - try: - # Retrieve a list of episodes for a show based on rating key - episodes_url = f"{self.plex_url}/library/metadata/{rating_key}/allLeaves" - episodes_url = re.sub("0//", "0/", episodes_url) - headers = {"X-Plex-Token": self.plex_token, - "accept": "application/json"} - response = requests.get(episodes_url, headers=headers) - - if response.status_code == 200: - tree = ET.ElementTree(ET.fromstring(response.text)) - root = tree.getroot() - episodes = [] - for video in root.iter('Video'): - if video.get('type') == 'episode': - episodes.append(video.get('title')) - return episodes - else: - return None - except Exception as e: - return None - -def read_config(): - config_file = config_path - with open(config_file, "r") as yaml_file: - config = yaml.load(yaml_file) - plex_url = config['plex']['url'] - plex_token = config['plex']['token'] - tmdb_api_key = config['tmdb']['apikey'] - return plex_url, plex_token, tmdb_api_key - -if __name__ == "__main__": - plex_url, plex_token, tmdb_api_key = read_config() - if plex_url and plex_token and tmdb_api_key: - plex = Plex(plex_url, plex_token, tmdb_api_key) - -def history(libraryCleanPath, stat): - stats = "./data/history/" + libraryCleanPath + "-history.json" - statsFile = open(stats, "r") - try: - statsData = json.load(statsFile) - statsFile.close() - if stat == "lastFull": - lastRefresh = statsData['lastRefresh'] - except: - lastRefresh = today - return lastRefresh - -def librarySetting(library, value): - - yaml = YAML() - settings = settings_path - with open(settings) as sf: - pref = yaml.load(sf) - if value == 'returning-soon': - try: - entry = pref['libraries'][library]['returning-soon'] - except KeyError: - entry = True - if entry not in (True, False): - print(f"Invalid setting returning-soon: '{entry}' for {library}, defaulting to True") - entry = True - if value == 'refresh': - try: - entry = pref['libraries'][library]['refresh'] - except KeyError: - entry = 15 - if value == 'days': - try: - entry = pref['libraries'][library]['days_ahead'] - if entry > 90: - entry = 90 - except: - entry = 30 - - if value == 'save_folder': - try: - entry = pref['libraries'][library]['save_folder'] - except KeyError: - 
entry = '' - - if value == 'font_path': - try: - entry = pref['libraries'][library]['font_path'] - except KeyError: - entry = 'fonts/Juventus-Fans-Bold.ttf' - - if value == 'overlay_save_folder': - try: - entry = pref['libraries'][library]['overlay_save_folder'] - except KeyError: - entry = 'overlays/' - - if value == 'trakt_list_privacy': - try: - entry = pref['libraries'][library]['trakt_list_privacy'] - except KeyError: - entry = 'private' - - return entry - -def setting(value): - yaml = YAML() - settings = settings_path - with open(settings) as sf: - pref = yaml.load(sf) - - if value == 'timezone_locality': - try: - use_local = pref['settings']['timezone']['enable'] - if use_local == True: - try: - timezone_valid_sources = ('host', 'default', 'forced') - timezone_source = pref['settings']['timezone']['source'] - if timezone_source not in timezone_valid_sources: - print(f"{timezone_source} : invalid setting") - print(f"Trying host locality.") - timezone_source = 'host' - if timezone_source == 'host': - try: - print(f"Attempting to get local timezone from host environment") - if is_docker == "True": - print("=> Docker environment detected") - try: - timezone = os.environ.get('TZ') - if timezone is None: - print(" Could not retrieve timezone information from docker 'TZ' environment variable.") - print(" => Attempting 'Docker Host'") - try: - print(" Docker Host Detected:", get_os()) - system_tz = tzlocal.get_localzone() - timezone = str(system_tz) - print(f" Using locality {timezone} to adjust for airing dates.") - except Exception as e: - print(" Could not retrieve timezone information from 'Docker Host'.") - print(f" An error occured: {e}") - print(" Falling back to default") - timezone = "America/New_York" - print(f" Using locality {timezone} to adjust for airing dates.") - except Exception as e: - print(f"An error occured: {e}") - print(f"Details:") - print(f"Environment detected") - print(f" => Docker") - print(f"Failed to retrieve timezone from:") - print(f" => Docker 'TZ' environment variable") - print(f" => Docker Host OS:", get_os()) - print("--Falling back to default--") - timezone = "America/New_York" - print(f"Using locality {timezone} to adjust for airing dates.") - entry = timezone - - if is_docker == "False": - try: - system_tz = tzlocal.get_localzone() - timezone = str(system_tz) - print(f'Found timezone information from host') - print(f"Using locality {timezone} to adjust for airing dates.") - except Exception as e: - print("Could not retrieve timezone information from host.") - print(f"An error occured: {e}") - print("Falling back default") - timezone = "America/New_York" - print(f"Using locality {timezone} to adjust for airing dates.") - entry = timezone - - except Exception as e: - print(f'Failed to retrieve local timezone from host') - print(f"An error occured: {e}") - print(f"Falling back to 'default'...") - timezone_source = 'default' - if timezone_source == 'forced': - try: - timezone = pref['settings']['timezone']['locality'] - print(f'Using user defined Timezone => "{timezone}" to adjust for airing dates.') - entry = timezone - except KeyError: - print(f"Timezone 'forced' locality missing or not found in settings.") - print(f'Check configuration/YAML structure') - print(f'Falling back to default...') - timezone_source = 'default' - if timezone_source == 'default': - timezone = 'America/New_York' - print("'default' timezone selected") - print(f"Using locality {timezone} to adjust for airing dates.") - entry = timezone - except Exception as e: - print("Encountered 
an error while parsing timezone settings:") - print(f"{e}") - - elif use_local == False: - timezone = 'America/New_York' - print(f"Using locality {timezone} to adjust for airing dates.") - entry = timezone - - - except KeyError: - timezone = 'America/New_York' - print(f"Using default locality '{timezone}' to adjust for airing dates.") - entry = timezone - - - if value == 'rsback_color': - entry = pref['returning_soon_bgcolor'] - if value == 'rsfont_color': - entry = pref['returning_soon_fontcolor'] - - if value == 'rs_vertical_align': - try: - entry = pref['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'rs_horizontal_align': - try: - entry = pref['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'rs_horizontal_offset': - try: - entry = pref['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'rs_vertical_offset': - try: - entry = pref['vertical_offset'] - except KeyError: - entry = '0' - - if value == 'prefix': - entry = pref['overlay_prefix'] - if value == 'dateStyle': - entry = pref['date_style'] - if value == 'zeros': - try: - entry = pref['leading_zeros'] - except: - entry = True - if value == 'delimiter': - try: - entry = pref['date_delimiter'] - except: - entry = "/" - if value == 'year': - try: - entry = pref['year_in_dates'] - except: - entry = False - - - if value == 'ovUpcoming': - try: - entry = pref['extra_overlays']['upcoming']['use'] - except: - entry = False - if value == 'ovUpcomingColor': - try: - entry = pref['extra_overlays']['upcoming']['bgcolor'] - except KeyError: - entry = "#fc4e03" - if value == 'ovUpcomingFontColor': - try: - entry = pref['extra_overlays']['upcoming']['font_color'] - except KeyError: - entry = "#FFFFFF" - if value == 'ovUpcomingText': - try: - entry = pref['extra_overlays']['upcoming']['text'] - except KeyError: - entry = "U P C O M I N G" - - if value == 'ovUpcoming_horizontal_align': - try: - entry = pref['extra_overlays']['upcoming']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovUpcoming_vertical_align': - try: - entry = pref['extra_overlays']['upcoming']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovUpcoming_horizontal_offset': - try: - entry = pref['extra_overlays']['upcoming']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovUpcoming_vertical_offset': - try: - entry = pref['extra_overlays']['upcoming']['vertical_offset'] - except KeyError: - entry = '0' - - - - if value == 'ovNew': - try: - entry = pref['extra_overlays']['new']['use'] - except: - entry = False - if value == 'ovNewDays': - try: - entry = pref['extra_overlays']['new']['new_days'] - except KeyError: - entry = 21 - if value == 'ovNewColor': - try: - entry = pref['extra_overlays']['new']['bgcolor'] - except KeyError: - entry = "#008001" - if value == 'ovNewFontColor': - try: - entry = pref['extra_overlays']['new']['font_color'] - except KeyError: - entry = "#FFFFFF" - if value == 'ovNewText': - try: - entry = pref['extra_overlays']['new']['text'] - except KeyError: - entry = 'N E W S E R I E S' - - if value == 'ovNew_horizontal_align': - try: - entry = pref['extra_overlays']['new']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovNew_vertical_align': - try: - entry = pref['extra_overlays']['new']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovNew_horizontal_offset': - try: - entry = pref['extra_overlays']['new']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 
'ovNew_vertical_offset': - try: - entry = pref['extra_overlays']['new']['vertical_offset'] - except KeyError: - entry = '0' - - - - if value == 'ovNewNext': - try: - entry = pref['extra_overlays']['new_next_air']['use'] - except KeyError: - entry = False - - if value == 'ovNewNextAirDays': - try: - entry = pref['extra_overlays']['new_next_air']['new_next_air_days'] - except KeyError: - entry = 21 - - if value == 'ovNewNextColor': - try: - entry = pref['extra_overlays']['new_next_air']['bgcolor'] - except KeyError: - entry = "#008001" - - if value == 'ovNewNextFontColor': - try: - entry = pref['extra_overlays']['new_next_air']['font_color'] - except KeyError: - entry = "#FFFFFF" - - if value == 'ovNewNextText': - try: - entry = pref['extra_overlays']['new_next_air']['text'] - except KeyError: - entry = 'NEW · AIRING' - - if value == 'ovNewNext_horizontal_align': - try: - entry = pref['extra_overlays']['new_next_air']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovNewNext_vertical_align': - try: - entry = pref['extra_overlays']['new_next_air']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovNewNext_horizontal_offset': - try: - entry = pref['extra_overlays']['new_next_air']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovNewNext_vertical_offset': - try: - entry = pref['extra_overlays']['new_next_air']['vertical_offset'] - except KeyError: - entry = '0' - - - if value == 'ovReturning': - try: - entry = pref['extra_overlays']['returning']['use'] - except: - entry = False - if value == 'ovReturningColor': - entry = pref['extra_overlays']['returning']['bgcolor'] - if value == 'ovReturningFontColor': - entry = pref['extra_overlays']['returning']['font_color'] - if value == 'ovReturningText': - entry = pref['extra_overlays']['returning']['text'] - - if value == 'ovReturning_horizontal_align': - try: - entry = pref['extra_overlays']['returning']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovReturning_vertical_align': - try: - entry = pref['extra_overlays']['returning']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovReturning_horizontal_offset': - try: - entry = pref['extra_overlays']['returning']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovReturning_vertical_offset': - try: - entry = pref['extra_overlays']['returning']['vertical_offset'] - except KeyError: - entry = '0' - - - - - if value == 'ovAiring': - try: - entry = pref['extra_overlays']['airing']['use'] - except: - entry = False - if value == 'ovAiringColor': - entry = pref['extra_overlays']['airing']['bgcolor'] - if value == 'ovAiringFontColor': - entry = pref['extra_overlays']['airing']['font_color'] - if value == 'ovAiringText': - entry = pref['extra_overlays']['airing']['text'] - - if value == 'ovAiring_horizontal_align': - try: - entry = pref['extra_overlays']['airing']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovAiring_vertical_align': - try: - entry = pref['extra_overlays']['airing']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovAiring_horizontal_offset': - try: - entry = pref['extra_overlays']['airing']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovAiring_vertical_offset': - try: - entry = pref['extra_overlays']['airing']['vertical_offset'] - except KeyError: - entry = '0' - - if value == 'ovAiringNext': - try: - entry = pref['extra_overlays']['airing_next']['use'] - except KeyError: - 
entry = False - - if value == 'ovAiringNextColor': - try: - entry = pref['extra_overlays']['airing_next']['bgcolor'] - except KeyError: - entry = "#006580" - - if value == 'ovAiringNextFontColor': - try: - entry = pref['extra_overlays']['airing_next']['font_color'] - except KeyError: - entry = "#FFFFFF" - - if value == 'ovAiringNextText': - try: - entry = pref['extra_overlays']['airing_next']['text'] - except KeyError: - entry = 'AIRING' - - if value == 'ovAiringNext_horizontal_align': - try: - entry = pref['extra_overlays']['airing_next']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovAiringNext_vertical_align': - try: - entry = pref['extra_overlays']['airing_next']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovAiringNext_horizontal_offset': - try: - entry = pref['extra_overlays']['airing_next']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovAiringNext_vertical_offset': - try: - entry = pref['extra_overlays']['airing_next']['vertical_offset'] - except KeyError: - entry = '0' - - - if value == 'ovEnded': - try: - entry = pref['extra_overlays']['ended']['use'] - except: - entry = False - if value == 'ovEndedColor': - entry = pref['extra_overlays']['ended']['bgcolor'] - if value == 'ovEndedFontColor': - entry = pref['extra_overlays']['ended']['font_color'] - if value == 'ovEndedText': - entry = pref['extra_overlays']['ended']['text'] - - if value == 'ovEnded_horizontal_align': - try: - entry = pref['extra_overlays']['ended']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovEnded_vertical_align': - try: - entry = pref['extra_overlays']['ended']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovEnded_horizontal_offset': - try: - entry = pref['extra_overlays']['ended']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovEnded_vertical_offset': - try: - entry = pref['extra_overlays']['ended']['vertical_offset'] - except KeyError: - entry = '0' - - - - - if value == 'ovCanceled': - try: - entry = pref['extra_overlays']['canceled']['use'] - except: - entry = False - if value == 'ovCanceledColor': - entry = pref['extra_overlays']['canceled']['bgcolor'] - if value == 'ovCanceledFontColor': - entry = pref['extra_overlays']['canceled']['font_color'] - if value == 'ovCanceledText': - entry = pref['extra_overlays']['canceled']['text'] - - if value == 'ovCanceled_horizontal_align': - try: - entry = pref['extra_overlays']['canceled']['horizontal_align'] - except KeyError: - entry = 'center' - - if value == 'ovCanceled_vertical_align': - try: - entry = pref['extra_overlays']['canceled']['vertical_align'] - except KeyError: - entry = 'top' - - if value == 'ovCanceled_horizontal_offset': - try: - entry = pref['extra_overlays']['canceled']['horizontal_offset'] - except KeyError: - entry = '0' - - if value == 'ovCanceled_vertical_offset': - try: - entry = pref['extra_overlays']['canceled']['vertical_offset'] - except KeyError: - entry = '0' - - return entry - -def traktApi(type): - yaml = YAML() - config = config_path - with open(config) as fp: - trakt = yaml.load(fp) - if type == 'token': - key = trakt['trakt']['authorization']['access_token'] - if type == 'client': - key = trakt['trakt']['client_id'] - if type == 'secret': - key = trakt['trakt']['client_secret'] - if type == 'me': - api = traktApi('client') - access = traktApi('token') - headers = { - 'Content-Type': 'application/json', - 'Authorization': 'Bearer ' + access + '', - 'trakt-api-version': '2', 
- 'trakt-api-key': '' + api + '' - } - key = json.loads(json.dumps(requests.get('https://api.trakt.tv/users/me', headers=headers).json()))['username'] - return key - -def tmdbApi(var): - yaml = YAML() - config = config_path - with open(config) as fp: - tmdb = yaml.load(fp) - if var == 'token': - key = tmdb['tmdb']['apikey'] - return key - -def plexApi(vix): - yaml = YAML() - config = config_path - with open(config) as fp: - plex = yaml.load(fp) - if vix == 'url': - key = plex['plex']['url'] - if vix == 'token': - key = plex['plex']['token'] - return key - - -def plexGet(identifier): - URL = plexApi('url') + '/library/sections/?X-Plex-Token=' + plexApi('token') - libraries = re.sub("0//", "0/", URL) - libSearch = ET.fromstring(requests.get(libraries).text) - for directory in libSearch.findall('Directory'): - if directory.get('title') == identifier: - key = directory.get('key') - title = directory.get('title') - return key - -def cleanPath(string): - cleanedPath = re.sub(r'[^\w]+', '-', string) - cleanedPath = cleanedPath.rstrip('-') - while '--' in cleanedPath: - cleanedPath = cleanedPath.replace('--', '-') - return cleanedPath - - - -class SonarrApi: - def __init__(self): - with open(config_path, "r") as pmm_config_yaml: - pmm_config_file = yaml.load(pmm_config_yaml) - self.sonarr_url = pmm_config_file['sonarr']['url'] - self.sonarr_token = pmm_config_file['sonarr']['token'] - self.sonarr_api_url = f'{self.sonarr_url}/api' - self.sonarr_status_endpoint = f'{self.sonarr_api_url}/system/status' - self.sonarr_series_endpoint = f'{self.sonarr_api_url}/series' - self.sonarr_headers = {'X-Api-Key': self.sonarr_token} - self.connected = self.check_connection() - - def check_connection(self): - try: - response = requests.get(self.sonarr_status_endpoint, headers=self.sonarr_headers) - response.raise_for_status() # Raises an error for bad status codes - print("Connection to Sonarr successful.") - return True # Connection successful - except requests.exceptions.RequestException as e: - print(f"Connection to Sonarr failed: {e}") - return False # Connection failed - - - def get_series_list(self): - response = requests.get(self.sonarr_series_endpoint, headers=self.sonarr_headers) - response.raise_for_status() - return sorted(response.json(), key=lambda x: x['title']) - - def get_missing_episodes_count(self, series_id): - sonarr_episodes_endpoint = f'{self.sonarr_api_url}/episode' - params = {'seriesId': series_id} - response = requests.get(sonarr_episodes_endpoint, headers=self.sonarr_headers, params=params) - response.raise_for_status() - - - episodes = response.json() - available_missing_episodes = len([ - episode for episode in episodes if episode.get('airDateUtc') and not episode.get('hasFile') - and episode['seasonNumber'] != 0 - and datetime.datetime.strptime(episode['airDateUtc'], "%Y-%m-%dT%H:%M:%SZ") < today - ]) - - total_episodes = len([episode for episode in episodes if episode.get('airDateUtc') and episode['seasonNumber'] != 0 - and datetime.datetime.strptime(episode.get('airDateUtc'), "%Y-%m-%dT%H:%M:%SZ") < today]) - self.missing_count = available_missing_episodes - self.total_count = total_episodes - return self -""") - create_vars_file.close() -else: print("Vars module file present.") - - # Check if this is a Docker Build to format PMM config folder directory is_docker = os.environ.get('PATTRMM_DOCKER', "False") From 7faa5370c37d51236c0c056efd0dae4dc6ab2223 Mon Sep 17 00:00:00 2001 From: InsertDisc <31751462+InsertDisc@users.noreply.github.com> Date: Sun, 19 Jan 2025 14:54:08 -0500 
Subject: [PATCH 3/5] Update vars.py Removed special character from default values to increase compatibility. --- vars.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/vars.py b/vars.py index 0f83164..226ca3c 100644 --- a/vars.py +++ b/vars.py @@ -111,7 +111,7 @@ def settings(self): me = traktApi('me') slug = cleanPath(self.extension_library) self.slug = slug - trakt_list_meta = f"https://trakt.tv/users/{me}/lists/in-history-{slug}" + trakt_list_meta = f"https://trakt.tv/users/{me}/lists/<>" try: self.trakt_list_privacy = pref['libraries'][self.extension_library]['extensions']['in-history']['trakt_list_privacy'] except KeyError: @@ -189,12 +189,21 @@ def settings(self): self.minimum = minimum except KeyError: self.minimum = 0 + print(f"Using default minimum size: {self.minimum}") try: maximum = pref['libraries'][self.extension_library]['extensions']['by_size']['maximum'] self.maximum = maximum except KeyError: self.maximum = None + print(f"Using default maximum size: None") + + try: + limit = pref['libraries'][self.extension_library]['extensions']['by_size']['limit'] + self.limit = limit + except KeyError: + self.limit = 500 + print(f"Using default limit: {self.limit}") try: self.save_folder = pref['libraries'][self.extension_library]['extensions']['by_size']['save_folder'] @@ -1060,7 +1069,7 @@ def setting(value): try: entry = pref['extra_overlays']['new_next_air']['text'] except KeyError: - entry = 'NEW · AIRING' + entry = 'NEW - AIRING' if value == 'ovNewNext_horizontal_align': try: From 603a3bef0710281fa96403db37f638a83674dae1 Mon Sep 17 00:00:00 2001 From: InsertDisc <31751462+InsertDisc@users.noreply.github.com> Date: Sun, 19 Jan 2025 15:09:21 -0500 Subject: [PATCH 4/5] Update main.py Refactor to allow for multiple run times. 
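
For context, the scheduling change in this patch boils down to: resolve the run times once (from `--times` or `PATTRMM_TIMES`, falling back to `02:00`), then compare the current `HH:MM` against that list on each pass of the loop. Below is a minimal sketch of that flow under the same names the patch uses; the whitespace stripping and the `run_job` callback are illustrative additions, not part of the diff itself:

```python
import os
import time

def resolve_run_times(cli_times=None):
    # --times takes priority, then PATTRMM_TIMES, then the 02:00 default.
    raw = cli_times or os.getenv("PATTRMM_TIMES", "02:00")
    # Strip whitespace so values like "02:00, 04:50" still match strftime output.
    return [t.strip() for t in raw.split(",") if t.strip()]

def wait_and_run(run_times, run_job):
    # run_job is a placeholder for executing pattrmm.py.
    print(f"Waiting for the next run at: {', '.join(run_times)}")
    while True:
        if time.strftime("%H:%M") in run_times:
            run_job()
            time.sleep(60)  # move past the current minute so the job fires only once
            print(f"Waiting for the next run at: {', '.join(run_times)}")
        time.sleep(1)
```

With this behaviour, setting `PATTRMM_TIMES=02:00,14:30` (or passing `--times "02:00,14:30"`) would trigger one run per listed minute each day.
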
PATTRMM_TIME is now PATTRMM_TIMES New run argument --times --- main.py | 50 ++++++++++++++++++++++++-------------------------- 1 file changed, 24 insertions(+), 26 deletions(-) diff --git a/main.py b/main.py index 1304790..0522127 100644 --- a/main.py +++ b/main.py @@ -1,37 +1,35 @@ +import os import time import argparse -import os + +# Arguments parser = argparse.ArgumentParser() -parser.add_argument("--run", action="store_true") -parser.parse_args() +parser.add_argument("--run", action="store_true", help="Run immediately.") +parser.add_argument("--times", type=str, help="Comma-separated times to run, e.g., '02:00,04:30'") args = parser.parse_args() -if args.run == True: +# Get times +run_times = args.times.split(",") if args.times else os.getenv("PATTRMM_TIMES", "02:00").split(",") +run_now = args.run or os.getenv("RUN_NOW", "false").lower() == "true" + +# Run now +if run_now: + print("Running immediately...") with open("pattrmm.py") as f: exec(f.read()) -if "RUN_NOW" in os.environ: - argument = os.environ["RUN_NOW"] - if argument.lower() == "true": - with open("pattrmm.py") as f: - exec(f.read()) - +# Schedule else: - if "PATTRMM_TIME" in os.environ: - runwhen = os.environ["PATTRMM_TIME"] - else: - runwhen = "02:00" - t = 1 - - dtime_24hour = time.strptime(runwhen, "%H:%M") - dtime_12hour = time.strftime( "%I:%M %p", dtime_24hour ) - - print("Waiting for next run at " + str(dtime_12hour)) - while t: - if runwhen == time.strftime('%H:%M'): + print(f"Waiting for the next run at: {', '.join(run_times)}") + while True: + current_time = time.strftime("%H:%M") + + if current_time in run_times: + print(f"Starting {current_time} run...") with open("pattrmm.py") as f: exec(f.read()) - time.sleep(60) - print("Waiting for next run at " + str(dtime_12hour)) - time.sleep(.5) - + + time.sleep(60) # Wait a minute + print(f"Waiting for the next run at: {', '.join(run_times)}") + + time.sleep(1) # Check every second From e4c0353400f5d7df45c61d8764d41d0b50f1a0e3 Mon Sep 17 00:00:00 2001 From: InsertDisc <31751462+InsertDisc@users.noreply.github.com> Date: Sun, 19 Jan 2025 15:42:25 -0500 Subject: [PATCH 5/5] Update README.md --- README.md | 48 ++++++++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/README.md b/README.md index 8f965a6..2b31449 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,9 @@ [![Docker Nightly](https://img.shields.io/badge/Docker-develop-blue?style=plastic)](https://github.com/users/insertdisc/packages/container/package/pattrmm?tag=nightly) [![Discord](https://img.shields.io/discord/1171872202858188910?style=plastic&label=Discord&color=%252300bc8c)](https://discord.com/invite/7yUYdqgxkn) -PATTRMM (Plex Assistant To The Regional Meta Manager) is a python script that automates a 'Returning Soon' Trakt list in chronological order by date and matching metadata and overlay file for use in [Plex Meta Manager](https://metamanager.wiki/en/latest/index.html). Extensions have been added to further PATTRMM's capabilities. +PATTRMM (Plex Assistant To The Regional Meta Manager) is a python script that automates a 'Returning Soon' Trakt list in chronological order by date and matching metadata and overlay file for use in [Kometa](https://metamanager.wiki/en/latest/index.html). Extensions have been added to further PATTRMM's capabilities. -> **_NOTE:_** The latest update changes the *-returning-soon-metadata.yml to *-returning-soon-collection.yml. Make sure to update your pmm config file with the new filename if you've updated your script. 
If you want to use the new alignment options then you will also need to delete your old 'pattrmm/preferences/' template files. +> **_NOTE:_** The latest update changes the *-returning-soon-metadata.yml to *-returning-soon-collection.yml. Make sure to update your Kometa config file with the new filename if you've updated your script. If you want to use the new alignment options then you will also need to delete your old 'pattrmm/preferences/' template files. ## Installation @@ -16,22 +16,22 @@ PATTRMM (Plex Assistant To The Regional Meta Manager) is a python script that au ### Requirements -Obviously, Plex-Meta-Manager (PMM) must be installed and setup on your machine. Additionally, Trakt MUST be setup in your PMM installation to post 'returning soon' series and various 'extensions' to. This is also what the *-returning-soon-metadata.yml and *-in-history.yml files will pull from. +Obviously, Kometa must be installed and setup on your machine. Additionally, Trakt MUST be setup in your Kometa installation to post 'returning soon' series and various 'extensions' to. This is also what the *-returning-soon-collection.yml and *-in-history.yml files will pull from. Required Python modules: - ruamel.yaml - requests - tzlocal These are included in requirements.txt. -If you want to use the default template font you will also need the font from the extras folder in your pmm fonts folder. +If you want to use the default template font you will also need the font from the extras folder in your Kometa fonts folder. ### Stand-alone setup -1. Drop pattrymm.py in a subfolder of your Plex Meta Manager config folder (i.e. Plex-meta-manager/config/pattrmm/pattrmm.py) and run it. A settings file will be created in the newly created preferences folder. The script will stop. +1. Drop pattrymm.py and vars.py in a subfolder of your Kometa config folder (i.e. Kometa/config/pattrmm/pattrmm.py Kometa/config/pattrmm/vars.py) and run pattrmm.py. A settings file will be created in the newly created preferences folder. The script will stop. 2. Fill in the appropriate settings in preferences/settings.yml (see [Settings file](#settings-file)). You can modify the appearance of the generated overlays file using the preferences/*-status-template.yml files. 3. Run the script again after you make your changes to initiate a full cycle. -To update the stand-alone version, you need to delete OR replace vars.py and replace pattrmm.py. +To update the stand-alone version, you need to replace vars.py and pattrmm.py. ### Docker setup @@ -45,12 +45,12 @@ services: - PUID=1000 - GUID=1000 - TZ=America/New_York - - PATTRMM_TIME=02:00 # Schedule run time + - PATTRMM_TIMES=02:00, 04:50 # Schedule run times - RUN_NOW=False # setting this to True will function the same as a -run command line argument and ignore the PATTRMM_TIME. 
volumes: - /path/to/pattrmm/data:/data - /path/to/pattrmm/preferences:/preferences - - /path/to/pmm/config:/config + - /path/to/kometa/config:/config restart: unless-stopped ``` @@ -62,15 +62,15 @@ You can initialize the settings file for the docker version with this command: docker run --rm -it -v "/path/to/pattrmm/preferences:/preferences" ghcr.io/insertdisc/pattrmm:nightly --run ``` -If you want to run Pattrmm now and not wait for `PATTRMM_TIME`, use this command: +If you want to run Pattrmm now and not wait for `PATTRMM_TIMES`, use this command: *Replace the paths below with the appropriate location for the directories used.* ```bash -docker run --rm -it -v "/path/to/pattrmm/preferences:/preferences" -v "/path/to/pattrmm/data:/data" -v "/path/to/pmm/config:/config" ghcr.io/insertdisc/pattrmm:nightly --run +docker run --rm -it -v "/path/to/pattrmm/preferences:/preferences" -v "/path/to/pattrmm/data:/data" -v "/path/to/kometa/config:/config" ghcr.io/insertdisc/pattrmm:nightly --run ``` -The Docker version runs daily at the specified PATTRMM_TIME. This is a 24 hour format. +The Docker version runs daily at the specified PATTRMM_TIMES. These are in a 24 hour format. ### unRAID setup @@ -79,7 +79,7 @@ Pattrmm now has a template available in the Community Applications for unRAID. T 1. Head over to the `Apps` tab in your unRAID instance and search for `Pattrmm`. There should only be one template from Droppisalt. 2. Select the `Install` button 3. Choose which branch you want to run `latest`, `develop`, or `nightly`. -4. Fill-in the required Paths and Environment Variables. **Make sure of the following:** The `/config` points to your `Plex-Meta-Manager` config.yml and NOT to the default folder that unRAID might point to (often `.../appdata/pattrmm`) +4. Fill-in the required Paths and Environment Variables. **Make sure of the following:** The `/config` points to your `Kometa` config.yml and NOT to the default folder that unRAID might point to (often `.../appdata/pattrmm`) - See below for an example. 5. Select `Apply`. @@ -91,12 +91,12 @@ You can initialize the settings file for the docker version with this command: docker run --rm -it -v "/path/to/pattrmm/preferences:/preferences" ghcr.io/insertdisc/pattrmm:nightly --run ``` -If you want to run Pattrmm now and not wait for `PATTRMM_TIME`, use this command: +If you want to run Pattrmm now and not wait for `PATTRMM_TIMES`, use this command: *Replace the paths below with the appropriate location for the directories used.* ```bash -docker run --rm -it -v "/path/to/pattrmm/preferences:/preferences" -v "/path/to/pattrmm/data:/data" -v "/path/to/pmm/config:/config" ghcr.io/insertdisc/pattrmm:nightly --run +docker run --rm -it -v "/path/to/pattrmm/preferences:/preferences" -v "/path/to/pattrmm/data:/data" -v "/path/to/kometa/config:/config" ghcr.io/insertdisc/pattrmm:nightly --run ``` ![image](https://github.com/InsertDisc/pattrmm/assets/67336980/24e23d34-8d92-4afc-a0bc-138ecfcc3067) @@ -163,6 +163,7 @@ libraries: by_size: minimum: 25 # Size in GB maximum: 90 + limit: 125 # list size limit order_by: size.desc collection_title: Movies sorted by size save_folder: collections/ @@ -262,7 +263,7 @@ extra_overlays: ```yaml save_folder: collections/ - # Specify a location to write the returning soon metadata file to. Your PMM config folder + # Specify a location to write the returning soon metadata file to. Your Kometa config folder # (where your config.yml is), will always be the BASE location. 
# So, a save_folder of 'collections/' # would put your file in a 'collections' sub-folder. If this directory does not exist @@ -270,15 +271,15 @@ save_folder: collections/ # Default location is beside your config.yml and does not need specified. overlay_save_folder: overlays/ - # Specify a location to write the returning soon overlay file to. Your PMM config folder + # Specify a location to write the returning soon overlay file to. Your Kometa config folder # (where your config.yml is), will always be the BASE location. # So, a save_folder of 'overlays/' # would put your file in a 'overlays' sub-folder. If this directory does not exist # PATTRMM will ATTEMPT to create it. - # Default location is the default PMM 'overlays' folder and does not need specified. + # Default location is the default Kometa 'overlays' folder and does not need specified. font_path: fonts/Juventus-Fans-Bold.ttf - # Specify a path to a font file to use for the overlays. Your PMM config folder + # Specify a path to a font file to use for the overlays. Your Kometa config folder # (where your config.yml is), will always be the BASE location. # Default font path is 'fonts/Juventus-Fans-Bold.ttf' and does not need specified. @@ -309,7 +310,7 @@ returning-soon: False settings: timezone: enable: True - # Enables specifying the timezone used for the TMDB Discover builder in PMM. + # Enables specifying the timezone used for the TMDB Discover builder in Kometa. # Plex Meta Manager defaults to using 'America/New_York' # Default setting is false and PATTRMM will specify the default in the generated files. @@ -325,7 +326,7 @@ settings: locality: Chile/Continental source: default - # Use PMM default 'America/New_York' + # Use Kometa default 'America/New_York' # Does not need defined when 'enable' is set to False, or the timezone setting is missing. date_style: 1 @@ -400,7 +401,7 @@ in-history: #Enables the 'In History' extension for a library. # If an 'ending' year is not specified then the current year will be used as the initial year. save_folder: collections/ - # Specify a location to write the extension metadata file to. Your PMM config folder + # Specify a location to write the extension collection file to. Your Kometa config folder # (where your config.yml is), will always be the BASE location. # So, a save_folder of 'collections/' # would put your file in a 'collections' sub-folder. If this directory does not exist @@ -488,6 +489,9 @@ by_size: #Enables the 'By Size' extension for a library. # This sets the maximum filesize to be included in the filtered list. # The default value has no upper limit. To use this extension with no # top limit, leave out this setting. + + limit: 120 + # Limit the size of the list to this length order_by: size.desc # Further sorting of the filtered list is possible with this option. @@ -503,7 +507,7 @@ by_size: #Enables the 'By Size' extension for a library. # order_by: added.asc save_folder: collections/ - # Specify a location to write the extension metadata file to. Your PMM config folder + # Specify a location to write the extension metadata file to. Your Kometa config folder # (where your config.yml is), will always be the BASE location. # So, a save_folder of 'collections/' # would put your file in a 'collections' sub-folder. If this directory does not exist
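
As a rough illustration of how the `by_size` options interact (a sketch under assumed names, not PATTRMM's actual implementation): items are filtered by `minimum`/`maximum` size, sorted according to `order_by`, and the resulting list is then capped at `limit` entries, which defaults to 500 when the setting is omitted.

```python
# Hypothetical item shape: (title, size_in_gb); not PATTRMM's real data structure.
def build_by_size_list(items, minimum=0, maximum=None, limit=500, descending=True):
    filtered = [
        (title, size) for title, size in items
        if size >= minimum and (maximum is None or size <= maximum)
    ]
    filtered.sort(key=lambda pair: pair[1], reverse=descending)  # size.desc / size.asc
    return filtered[:limit]  # the 'limit' setting caps the list length

movies = [("A", 12.4), ("B", 48.0), ("C", 95.2), ("D", 30.1)]
print(build_by_size_list(movies, minimum=25, maximum=90, limit=2))
# [('B', 48.0), ('D', 30.1)]
```
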