Mirror of https://github.com/meisnate12/Plex-Meta-Manager (synced 2024-11-10 06:54:21 +00:00)
[28] remove timers

parent 6163a1f83b
commit d91c6bf635
7 changed files with 54 additions and 48 deletions
VERSION
@@ -1 +1 @@
-1.16.5-develop27
+1.16.5-develop28
@@ -1984,14 +1984,19 @@ class CollectionBuilder:
         return attribute, modifier, final

     def fetch_item(self, item):
+        if isinstance(item, (Movie, Show, Season, Episode, Artist, Album, Track)):
+            if item.ratingKey not in self.library.cached_items:
+                self.library.cached_items[item.ratingKey] = (item, False)
+            return item
+        key = int(item)
+        if key in self.library.cached_items:
+            cached_item, full_obj = self.library.cached_items[key]
+            return cached_item
         try:
-            key = item.ratingKey if isinstance(item, (Movie, Show, Season, Episode, Artist, Album, Track)) else int(item)
-            if key in self.library.cached_items:
-                return self.library.cached_items[key]
             current = self.library.fetchItem(key)
             if not isinstance(current, (Movie, Show, Season, Episode, Artist, Album, Track)):
                 raise NotFound
-            self.library.cached_items[key] = current
+            self.library.cached_items[key] = (current, True)
             return current
         except (BadRequest, NotFound):
             raise Failed(f"Plex Error: Item {item} not found")
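For context, the builder.py and library.py changes above switch library.cached_items from storing bare objects to storing (object, fully_loaded) tuples. A minimal standalone sketch of that cache shape follows; ItemCache, store_partial, and store_full are illustrative names, not PMM code.

# Sketch only: mirrors the shape of library.cached_items after this commit,
# where each entry is a (plex_object, fully_loaded) tuple rather than a bare object.
from typing import Any, Dict, Tuple

class ItemCache:
    def __init__(self) -> None:
        self.cached_items: Dict[int, Tuple[Any, bool]] = {}

    def store_partial(self, key: int, item: Any) -> None:
        # Search/list results are cached as "not fully loaded" (False) so a
        # later reload knows the full record still has to be fetched once.
        if key not in self.cached_items:
            self.cached_items[key] = (item, False)

    def store_full(self, key: int, item: Any) -> None:
        # A reloaded object replaces the partial entry and is marked True.
        self.cached_items[key] = (item, True)

    def lookup(self, key: int):
        # Returns (item, fully_loaded) or None if the key was never cached.
        return self.cached_items.get(key)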
@@ -2146,6 +2151,7 @@ class CollectionBuilder:
     def check_filters(self, item, display):
         if (self.filters or self.tmdb_filters) and not self.details["only_filter_missing"]:
             logger.ghost(f"Filtering {display} {item.title}")
+            self.library.reload(item)
             if self.tmdb_filters and isinstance(item, (Movie, Show)):
                 if item.ratingKey not in self.library.movie_rating_key_map and item.ratingKey not in self.library.show_rating_key_map:
                     logger.warning(f"Filter Error: No {'TMDb' if self.library.is_movie else 'TVDb'} ID found for {item.title}")
@@ -303,7 +303,7 @@ class Library(ABC):
         logger.info("")
         items = self.get_all()
         for item in items:
-            self.cached_items[item.ratingKey] = item
+            self.cached_items[item.ratingKey] = (item, False)
         return items

     def map_guids(self, items):
@@ -67,6 +67,11 @@ class Operations:
                 reverse_anidb[v] = k

         for i, item in enumerate(items, 1):
+            try:
+                self.library.reload(item)
+            except Failed as e:
+                logger.error(e)
+                continue
             logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
             if self.library.assets_for_all:
                 self.library.update_asset2(item)
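The operations.py hunk adds a common reload-then-skip pattern: each item is reloaded up front, and items whose reload raises are dropped from the pass instead of aborting it. A hedged sketch of that loop with stand-in names (reload_item and process are placeholders for self.library.reload and the per-item operations work):

# Sketch of the reload-then-skip loop added above; not PMM code.
import logging

logger = logging.getLogger(__name__)

class ReloadError(Exception):
    """Stand-in for PMM's Failed exception."""

def run_operations(items, reload_item, process):
    for i, item in enumerate(items, 1):
        try:
            reload_item(item)          # may raise ReloadError
        except ReloadError as e:
            logger.error(e)
            continue                   # skip items that cannot be reloaded
        process(i, item)               # remaining per-item work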
@@ -175,13 +175,13 @@ class Overlays:
             logger.separator(f"Applying Overlays for the {self.library.name} Library")
             logger.info("")
             for i, (over_key, (item, over_names)) in enumerate(sorted(key_to_overlays.items(), key=lambda io: io[1][0].titleSort), 1):
-                util.check_time("Overlay Start Time")
                 try:
                     logger.ghost(f"Overlaying: {i}/{len(key_to_overlays)} {item.title}")
                     image_compare = None
                     overlay_compare = None
                     if self.config.Cache:
                         image, image_compare, _ = self.config.Cache.query_image_map(item.ratingKey, f"{self.library.image_table_name}_overlays")
+
                     overlay_compare = [] if overlay_compare is None else util.get_list(overlay_compare)
                     has_overlay = any([item_tag.tag.lower() == "overlay" for item_tag in item.labels])
@@ -196,13 +196,11 @@ class Overlays:
                             overlay_change = True

                     clean_name, _ = util.validate_filename(item.title)
-                    util.check_time("Initial Bit")
                     poster, _, item_dir = self.library.find_assets(
                         name="poster" if self.library.asset_folders else clean_name,
                         folder_name=clean_name if self.library.asset_folders else None,
                         prefix=f"{item.title}'s "
                     )
-                    util.check_time("Find Asset Time")

                     has_original = None
                     changed_image = False
@@ -210,22 +208,17 @@ class Overlays:
                     if poster:
                         if image_compare and str(poster.compare) != str(image_compare):
                             changed_image = True
-                        util.check_time("Choose Image (From Assets) Time")
                     elif has_overlay:
-                        test = "Backup"
                         if os.path.exists(os.path.join(self.library.overlay_backup, f"{item.ratingKey}.png")):
                             has_original = os.path.join(self.library.overlay_backup, f"{item.ratingKey}.png")
                         elif os.path.exists(os.path.join(self.library.overlay_backup, f"{item.ratingKey}.jpg")):
                             has_original = os.path.join(self.library.overlay_backup, f"{item.ratingKey}.jpg")
                         else:
-                            test = "Online"
                             new_backup = find_poster_url(item)
                             if new_backup is None:
                                 new_backup = item.posterUrl
-                        util.check_time(f"Choose Image (From {test}) Time")
                     else:
                         new_backup = item.posterUrl
-                        util.check_time("Choose Image (From Plex) Time")
                     if new_backup:
                         changed_image = True
                         image_response = self.config.get(new_backup)
@@ -238,7 +231,6 @@ class Overlays:
                             while util.is_locked(backup_image_path):
                                 time.sleep(1)
                             has_original = backup_image_path
-                    util.check_time("Find Image Time")

                     poster_compare = None
                     if poster is None and has_original is None:
@@ -246,7 +238,6 @@ class Overlays:
                     elif changed_image or overlay_change:
                         new_poster = Image.open(poster.location if poster else has_original).convert("RGBA")
                         temp = os.path.join(self.library.overlay_folder, f"temp.png")
-                        util.check_time("Open Image Time")
                         try:
                             blur_num = 0
                             for over_name in over_names:
@@ -275,5 +266,4 @@ class Overlays:
                                                            poster_compare, overlay=','.join(over_names))
                 except Failed as e:
                     logger.error(e)
-                util.check_time("Overall Overlay Time", end=True)
             logger.exorcise()
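All of the util.check_time(...) lines in the overlays.py hunks above are deletions: per the commit title they were temporary profiling checkpoints (added in a previous develop build) that this commit removes, along with the test variable that only fed the checkpoint labels. The real util.check_time implementation is not shown in this diff; a named-checkpoint timer in that style could look roughly like the following hypothetical sketch.

# Hypothetical named-checkpoint timer, sketched only to illustrate what the
# removed util.check_time("...") calls were doing; not the actual util module.
import time

_last = None

def check_time(label, end=False):
    global _last
    now = time.perf_counter()
    if _last is not None:
        # Print the elapsed time since the previous checkpoint under this label.
        print(f"{label}: {now - _last:.3f}s")
    # end=True closes out the current measurement chain; otherwise keep timing.
    _last = None if end else now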
@@ -464,13 +464,18 @@ class Plex(Library):
         collection.sortUpdate(sort=data)

     @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
-    def reload(self, item):
+    def reload(self, item, force=True):
+        is_full = False
+        if item.ratingKey in self.cached_items:
+            cached_item, is_full = self.cached_items[item.ratingKey]
         try:
-            item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False,
-                        includeChildren=False, includeConcerts=False, includeExternalMedia=False, includeExtras=False,
-                        includeFields=False, includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False,
-                        includeOnDeck=False, includePopularLeaves=False, includeRelated=False,
-                        includeRelatedCount=0, includeReviews=False, includeStations=False)
+            if not is_full:
+                item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False,
+                            includeChildren=False, includeConcerts=False, includeExternalMedia=False, includeExtras=False,
+                            includeFields=False, includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False,
+                            includeOnDeck=False, includePopularLeaves=False, includeRelated=False,
+                            includeRelatedCount=0, includeReviews=False, includeStations=False)
+            self.cached_items[item.ratingKey] = (item, True)
         except (BadRequest, NotFound) as e:
             logger.stacktrace()
             raise Failed(f"Item Failed to Load: {e}")
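The @retry decorator on Plex.reload comes from the retrying package: retry_on_exception is a predicate that receives the raised exception and returns True when another attempt should be made. A minimal hedged sketch of that pattern (is_retryable and PlexUnavailable are illustrative stand-ins for util.retry_if_not_plex and a transient Plex error, and the sketch assumes the retrying package is installed):

# Sketch of the retrying-package pattern used on Plex.reload above.
from retrying import retry

class PlexUnavailable(Exception):
    """Illustrative stand-in for a transient Plex connection error."""

def is_retryable(exception):
    # Retry only transient errors; anything else propagates immediately.
    return isinstance(exception, PlexUnavailable)

@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=is_retryable)
def reload_item(item):
    item.reload()  # plexapi objects re-fetch their metadata in place
    return item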
@@ -711,7 +716,6 @@ class Plex(Library):
         for i, item in enumerate(all_items, 1):
             logger.ghost(f"Processing: {i}/{len(all_items)} {item.title}")
             add_item = True
-            self.reload(item)
             for collection in item.collections:
                 if collection.id in collection_indexes:
                     add_item = False
@@ -281,31 +281,32 @@ def update_libraries(config):
             if not operations_only and (library.overlay_files or library.remove_overlays):
                 library.Overlays.run_overlays()

-            for metadata in library.metadata_files:
-                metadata_name = metadata.get_file_name()
-                if config.requested_metadata_files and metadata_name not in config.requested_metadata_files:
-                    logger.info("")
-                    logger.separator(f"Skipping {metadata_name} Metadata File")
-                    continue
-                logger.info("")
-                logger.separator(f"Running {metadata_name} Metadata File\n{metadata.path}")
-                if not config.test_mode and not config.resume_from and not collection_only and not operations_only and not overlays_only:
-                    try:
-                        metadata.update_metadata()
-                    except Failed as e:
-                        library.notify(e)
-                        logger.error(e)
-                collections_to_run = metadata.get_collections(config.requested_collections)
-                if config.resume_from and config.resume_from not in collections_to_run:
-                    logger.info("")
-                    logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
-                    continue
-                if collections_to_run and not operations_only and not overlays_only:
-                    logger.info("")
-                    logger.separator(f"{'Test ' if config.test_mode else ''}Collections")
-                    logger.remove_library_handler(library.mapping_name)
-                    run_collection(config, library, metadata, collections_to_run)
-                    logger.re_add_library_handler(library.mapping_name)
+            if not operations_only and not overlays_only:
+                for metadata in library.metadata_files:
+                    metadata_name = metadata.get_file_name()
+                    if config.requested_metadata_files and metadata_name not in config.requested_metadata_files:
+                        logger.info("")
+                        logger.separator(f"Skipping {metadata_name} Metadata File")
+                        continue
+                    logger.info("")
+                    logger.separator(f"Running {metadata_name} Metadata File\n{metadata.path}")
+                    if not config.test_mode and not config.resume_from and not collection_only:
+                        try:
+                            metadata.update_metadata()
+                        except Failed as e:
+                            library.notify(e)
+                            logger.error(e)
+                    collections_to_run = metadata.get_collections(config.requested_collections)
+                    if config.resume_from and config.resume_from not in collections_to_run:
+                        logger.info("")
+                        logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
+                        continue
+                    if collections_to_run:
+                        logger.info("")
+                        logger.separator(f"{'Test ' if config.test_mode else ''}Collections")
+                        logger.remove_library_handler(library.mapping_name)
+                        run_collection(config, library, metadata, collections_to_run)
+                        logger.re_add_library_handler(library.mapping_name)

             if not config.library_first and not config.test_mode and not collection_only:
                 if not overlays_only and library.library_operation:
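The last hunk hoists the repeated "not operations_only and not overlays_only" test out of the inner conditions and into a single guard around the metadata loop, so the per-file checks only carry the flags they actually need. The same refactor in miniature, with illustrative names:

# Miniature version of the guard-hoisting above: the shared condition moves to
# one outer "if", and the inner checks drop the duplicated flags.
def process_files(files, operations_only=False, overlays_only=False, test_mode=False):
    if operations_only or overlays_only:
        return  # equivalent to wrapping the loop in "if not A and not B"
    for f in files:
        if not test_mode:   # previously: not test_mode and not operations_only and not overlays_only
            run_updates(f)
        run_collections(f)  # previously guarded by the same repeated flags

def run_updates(f):
    print(f"updating {f}")

def run_collections(f):
    print(f"collections for {f}")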