mirror of https://github.com/meisnate12/Plex-Meta-Manager (synced 2024-11-10 06:54:21 +00:00)

[125] small wiki fixes

commit 698a0d647e (parent b02f5ba0ea)
9 changed files with 77 additions and 36 deletions
VERSION

```diff
@@ -1 +1 @@
-1.17.3-develop124
+1.17.3-develop125
```

```diff
@@ -407,7 +407,8 @@ html_theme_options = {
             ("_menu", "FlixPatrol Builders", "metadata/builders/flixpatrol", [
                 ("FlixPatrol Top Platform", "metadata/builders/flixpatrol", "#flixpatrol-top"),
                 ("FlixPatrol Popular", "metadata/builders/flixpatrol", "#flixpatrol-popular"),
-                ("FlixPatrol demographicURLTop Platform", "metadata/builders/flixpatrol", "#flixpatrol-url"),
+                ("FlixPatrol Demographics", "metadata/builders/flixpatrol", "#flixpatrol-demographics"),
+                ("FlixPatrol URL", "metadata/builders/flixpatrol", "#flixpatrol-url"),
             ]),
             ("_menu", "Reciperr Builders", "metadata/builders/reciperr", [
                 ("Reciperr List", "metadata/builders/reciperr", "#reciperr-list"),
```

````diff
@@ -194,6 +194,22 @@ libraries:
       - file: config/Overlays.yml
 ```
 
+#### Schedule Overlays
+
+You can schedule all overlays from a library by adding `schedule` to `overlay_path` and setting it to [Any Schedule Option](../metadata/details/schedule).
+
+**You cannot schedule individual Overlay Files.**
+
+```yaml
+libraries:
+  TV Shows:
+    metadata_path:
+      - file: config/TV Shows.yml
+    overlay_path:
+      - schedule: weekly(sunday)
+      - file: config/Overlays.yml
+```
+
 ### Report Path
 
 The `report_path` attribute is used to define where to save the YAML Report file. This file is used to store information about what media is added, removed, filtered, and missing from the Plex library compared to what is expected from the Metadata file.
````

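For reference only (this is existing docs context shown above, not part of the commit's changes): `report_path` is set per library in `config.yml`, alongside `metadata_path`. A minimal sketch, assuming a library named TV Shows; the exact report filename is illustrative:

```yaml
libraries:
  TV Shows:
    metadata_path:
      - file: config/TV Shows.yml
    report_path: config/TV Shows_report.yml
```
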
```diff
@@ -33,8 +33,10 @@ libraries:
       - pmm: actor
         schedule: weekly(friday)
     overlay_path:
       - pmm: imdb
-        schedule: weekly(saturday)
+      - schedule: weekly(saturday)
       - pmm: audio_codec
       - pmm: resolution
       - pmm: video_format
     operations:
       mass_critic_rating_update: tmdb
 playlist_files:
```

```diff
@@ -821,6 +821,8 @@ class ConfigFile:
             if lib and "template_variables" in lib and lib["template_variables"] and isinstance(lib["template_variables"], dict):
                 lib_vars = lib["template_variables"]
 
+            logger.separator("Metadata Files", space=False, border=False)
+
             try:
                 if lib and "metadata_path" in lib:
                     if not lib["metadata_path"]:
@@ -848,6 +850,8 @@ class ConfigFile:
             except NotScheduled:
                 params["skip_library"] = True
 
+            logger.separator("Overlay Files", space=False, border=False)
+
             params["overlay_path"] = []
             params["remove_overlays"] = False
             params["reapply_overlays"] = False
@@ -888,9 +892,10 @@ class ConfigFile:
                         if not self.ignore_schedules:
                             err = e
                 if err:
-                    raise NotScheduled(f"{err}\n\nOverlays not scheduled to run")
+                    raise NotScheduled(f"Overlay Schedule:{err}\n\nOverlays not scheduled to run")
                 params["overlay_path"] = files
             except NotScheduled as e:
                 logger.info("")
                 logger.info(e)
                 params["overlay_path"] = []
                 params["remove_overlays"] = False
```

```diff
@@ -1,7 +1,7 @@
 from datetime import datetime
 from json import JSONDecodeError
 from modules import util
-from modules.util import Failed
+from modules.util import Failed, LimitReached
 from urllib.parse import urlparse
 
 logger = util.logger
@@ -73,6 +73,8 @@ class Mdblist:
         self.expiration = expiration
         try:
             self._request(imdb_id="tt0080684", ignore_cache=True)
+        except LimitReached:
+            self.limit = True
         except Failed:
             self.apikey = None
             raise
@@ -110,6 +112,7 @@ class Mdblist:
         if "response" in response and response["response"] is False:
             if response["error"] == "API Limit Reached!":
                 self.limit = True
+                raise LimitReached(f"MdbList Error: {response['error']}")
             raise Failed(f"MdbList Error: {response['error']}")
         else:
             mdb = MDbObj(response)
```

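For context, not part of the commit itself: the hunks above, the `LimitReached` exception class added in a later hunk, and the `except LimitReached` handlers in the operations hunks below all implement one pattern: raise a dedicated exception when MdbList answers "API Limit Reached!", remember it on a `limit` flag, and downgrade later occurrences to debug-level log lines so the run keeps going. A minimal, self-contained sketch of that pattern; `FakeMdbApi` and its methods are hypothetical stand-ins, not the project's actual classes:

```python
class Failed(Exception):
    """Generic lookup failure."""

class LimitReached(Exception):
    """Raised once the service reports that its API limit was hit."""

class FakeMdbApi:
    def __init__(self):
        self.limit = False  # remembered so later lookups can be skipped entirely

    def _request(self, imdb_id):
        # Stand-in for the real HTTP call; pretend the service is rate limited.
        return {"response": False, "error": "API Limit Reached!"}

    def get_movie(self, imdb_id):
        response = self._request(imdb_id)
        if "response" in response and response["response"] is False:
            if response["error"] == "API Limit Reached!":
                self.limit = True
                raise LimitReached(f"MdbList Error: {response['error']}")
            raise Failed(f"MdbList Error: {response['error']}")
        return response

api = FakeMdbApi()
for imdb_id in ["tt0080684", "tt0076759"]:
    if api.limit:  # once the limit is known, skip further calls
        break
    try:
        print(api.get_movie(imdb_id))
    except LimitReached as e:
        print(f"debug: {e}")    # expected, debug-level event
    except Failed as e:
        print(f"warning: {e}")  # a real lookup failure
```

The commit applies the same idea in three places: the initial test request catches `LimitReached` to set the flag, the request path raises it, and the operations code logs it at debug level instead of treating it as a failure.
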
```diff
@@ -1,7 +1,7 @@
 import os, re
 from datetime import datetime
 from modules import plex, util
-from modules.util import Failed, YAML
+from modules.util import Failed, LimitReached, YAML
 
 logger = util.logger
 
@@ -193,6 +193,7 @@ class Operations:
             mdb_item = None
             if any([o and o.startswith("mdb") for o in self.library.meta_operations]):
                 if self.config.Mdblist.limit is False:
                     try:
                         if self.library.is_show and tvdb_id and mdb_item is None:
                             try:
                                 mdb_item = self.config.Mdblist.get_series(tvdb_id)
@@ -204,6 +205,8 @@ class Operations:
                         if tmdb_id and mdb_item is None:
                             try:
                                 mdb_item = self.config.Mdblist.get_movie(tmdb_id)
+                            except LimitReached as e:
+                                logger.debug(e)
                             except Failed as e:
                                 logger.trace(str(e))
                             except Exception:
@@ -212,6 +215,8 @@ class Operations:
                         if imdb_id and mdb_item is None:
                             try:
                                 mdb_item = self.config.Mdblist.get_imdb(imdb_id)
+                            except LimitReached as e:
+                                logger.debug(e)
                             except Failed as e:
                                 logger.trace(str(e))
                             except Exception:
@@ -219,6 +224,8 @@ class Operations:
                                 raise
                         if mdb_item is None:
                             logger.warning(f"No MdbItem for Guid: {item.guid}")
+                    except LimitReached as e:
+                        logger.debug(e)
 
             def update_rating(attribute, item_attr, display):
                 current = getattr(item, item_attr)
```

```diff
@@ -18,6 +18,9 @@ logger = logging.getLogger("Plex Meta Manager")
 class TimeoutExpired(Exception):
     pass
 
+class LimitReached(Exception):
+    pass
+
 class Failed(Exception):
     pass
 
@@ -459,9 +462,10 @@ def load_files(files_to_load, method, schedule=None, lib_vars=None):
                     if not ignore_schedules:
                         err = e
             if err:
-                logger.warning(f"{err}\n\nMetadata File{'s' if len(current) > 1 else ''} not scheduled to run")
+                logger.warning(f"Metadata Schedule:{err}\n\nMetadata File{'s' if len(current) > 1 else ''} not scheduled to run")
                 for file_type, file_path, temp_vars, asset_directory in current:
                     logger.warning(f"{file_type}: {file_path}")
                 logger.info("")
                 continue
             files.extend(current)
         else:
```

```diff
@@ -486,8 +486,11 @@ def run_libraries(config):
             logger.separator(f"Deleting all Collections from the {library.name} Library", space=False, border=False)
             logger.info("")
             for collection in library.get_all_collections():
-                logger.info(f"Collection {collection.title} Deleted")
-                library.query(collection.delete)
+                try:
+                    library.query(collection.delete)
+                    logger.info(f"Collection {collection.title} Deleted")
+                except NotFound:
+                    logger.error(f"Collection {collection.title} Failed to Delete")
             library_status[library.name]["All Collections Deleted"] = str(datetime.now() - time_start).split('.')[0]
 
         time_start = datetime.now()
```