mirror of https://github.com/meisnate12/Plex-Meta-Manager
synced 2024-11-22 12:33:10 +00:00

Commit 87f48855e6: updated plexapi to 4.10.1
Parent commit: 3b3343fe07

8 changed files with 175 additions and 173 deletions
VERSION (2 changes)

@@ -1 +1 @@
1.16.2-develop29
1.16.2-develop30
@@ -2276,8 +2276,8 @@ class CollectionBuilder:
tmdb_id = self.config.Convert.tvdb_to_tmdb(self.library.show_rating_key_map[item.ratingKey])
names = {s.season_number: s.name for s in self.config.TMDb.get_show(tmdb_id).seasons}
for season in self.library.query(item.seasons):
if season.index in names:
self.library.edit_query(season, {"title.locked": 1, "title.value": names[season.index]})
if season.index in names and season.title != names[season.index]:
season.editTitle(names[season.index])
except Failed as e:
logger.error(e)
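The hunk above swaps the old edit_query call, which posted title.value/title.locked form fields, for plexapi's editTitle mixin and only touches seasons whose titles actually differ. A minimal sketch of the new pattern, assuming a reachable server; the URL, token, section, and show names are placeholders:

from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")
show = plex.library.section("TV Shows").get("Some Show")  # hypothetical show title

names = {1: "The Beginning"}  # season_number -> desired name, e.g. built from TMDb data
for season in show.seasons():
    if season.index in names and season.title != names[season.index]:
        season.editTitle(names[season.index])  # plexapi locks the edited field by default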
@@ -2348,7 +2348,6 @@ class CollectionBuilder:
self.library.update_smart_collection(self.obj, self.smart_url)
logger.info(f"Detail: Smart Filter updated to {self.smart_url}")

edits = {}
def get_summary(summary_method, summaries):
logger.info(f"Detail: {summary_method} will update {self.Type} Summary")
return summaries[summary_method]

@@ -2374,30 +2373,41 @@ class CollectionBuilder:
elif "tvdb_show_details" in self.summaries: summary = get_summary("tvdb_show_details", self.summaries)
elif "tmdb_show_details" in self.summaries: summary = get_summary("tmdb_show_details", self.summaries)
else: summary = None
if summary:
if str(summary) != str(self.obj.summary):
edits["summary.value"] = summary
edits["summary.locked"] = 1

if "sort_title" in self.details:
if str(self.details["sort_title"]) != str(self.obj.titleSort):
edits["titleSort.value"] = self.details["sort_title"]
edits["titleSort.locked"] = 1
logger.info(f"Detail: sort_title will update Collection Sort Title to {self.details['sort_title']}")

if "content_rating" in self.details:
if str(self.details["content_rating"]) != str(self.obj.contentRating):
edits["contentRating.value"] = self.details["content_rating"]
edits["contentRating.locked"] = 1
logger.info(f"Detail: content_rating will update Collection Content Rating to {self.details['content_rating']}")

if len(edits) > 0:
logger.debug(edits)
try:
if self.playlist:
self.obj.edit(summary=str(edits["summary.value"]))
if summary and str(summary) != str(self.obj.summary):
try:
self.obj.edit(summary=str(summary))
logger.info(f"Summary | {summary:<25}")
logger.info("Details: have been updated")
except NotFound:
logger.error("Details: Failed to Update Please delete the collection and run again")
logger.info("")
else:
self.library.edit_query(self.obj, edits)
self.obj.batchEdits()

batch_display = "Collection Metadata Edits"
if summary and str(summary) != str(self.obj.summary):
self.obj.editSummary(summary)
batch_display += f"\nSummary | {summary:<25}"

if "sort_title" in self.details and str(self.details["sort_title"]) != str(self.obj.titleSort):
self.obj.editSortTitle(self.details["sort_title"])
batch_display += f"\nSort Title | {self.details['sort_title']}"

if "content_rating" in self.details and str(self.details["content_rating"]) != str(self.obj.contentRating):
self.obj.editContentRating(self.details["content_rating"])
batch_display += f"\nContent Rating | {self.details['content_rating']}"

add_tags = self.details["label"] if "label" in self.details else None
remove_tags = self.details["label.remove"] if "label.remove" in self.details else None
sync_tags = self.details["label.sync"] if "label.sync" in self.details else None
batch_display += f"\n{self.library.edit_tags('label', self.obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags, do_print=False)[28:]}"

logger.info(batch_display)
if len(batch_display) > 25:
try:
self.obj.saveEdits()
logger.info("Details: have been updated")
except NotFound:
logger.error("Details: Failed to Update Please delete the collection and run again")
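The replacement above stages every collection detail edit locally with batchEdits(), applies editSummary/editSortTitle/editContentRating, and commits everything with a single saveEdits() call (playlists keep the plain edit() path). A hedged sketch of the batched flow; the section, collection, and field values are placeholders:

from plexapi.exceptions import NotFound
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")
movies = plex.library.section("Movies")
collection = next(c for c in movies.collections() if c.title == "Marvel")  # hypothetical collection

collection.batchEdits()                          # start staging edits locally
if str(collection.summary) != "All the Marvel movies":
    collection.editSummary("All the Marvel movies")
if str(collection.titleSort) != "Marvel 01":
    collection.editSortTitle("Marvel 01")
if str(collection.contentRating) != "PG-13":
    collection.editContentRating("PG-13")
try:
    collection.saveEdits()                       # one request applies all staged edits
except NotFound:
    print("Details: Failed to Update Please delete the collection and run again")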
@@ -2410,7 +2420,7 @@ class CollectionBuilder:
if int(self.obj.collectionSort) not in plex.collection_order_keys\
or plex.collection_order_keys[int(self.obj.collectionSort)] != self.details["collection_order"]:
self.library.collection_order_query(self.obj, self.details["collection_order"])
logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}")
logger.info(f"Collection Order | {self.details['collection_order']}")

if "visible_library" in self.details or "visible_home" in self.details or "visible_shared" in self.details:
visibility = self.library.collection_visibility(self.obj)

@@ -2429,12 +2439,7 @@ class CollectionBuilder:

if visible_library is not None or visible_home is not None or visible_shared is not None:
self.library.collection_visibility_update(self.obj, visibility=visibility, library=visible_library, home=visible_home, shared=visible_shared)
logger.info("Detail: Collection visibility updated")

add_tags = self.details["label"] if "label" in self.details else None
remove_tags = self.details["label.remove"] if "label.remove" in self.details else None
sync_tags = self.details["label.sync"] if "label.sync" in self.details else None
self.library.edit_tags("label", self.obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
logger.info("Collection Visibility Updated")

poster_image = None
background_image = None
@@ -100,6 +100,7 @@ class Library(ABC):
self.library_operation = True if self.items_library_operation or self.delete_unmanaged_collections or self.delete_collections_with_less \
or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.mass_collection_mode \
or self.genre_collections or self.show_unmanaged or self.metadata_backup or self.update_blank_track_titles else False
self.meta_operations = [self.mass_genre_update, self.mass_audience_rating_update, self.mass_critic_rating_update, self.mass_content_rating_update, self.mass_originally_available_update]

if self.asset_directory:
logger.info("")

@@ -235,7 +236,7 @@ class Library(ABC):
pass

@abstractmethod
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None, do_print=True):
pass

@abstractmethod
@@ -2,7 +2,7 @@ import operator, os, re
from datetime import datetime
from modules import plex, ergast, util
from modules.util import Failed, ImageData
from plexapi.exceptions import NotFound
from plexapi.exceptions import NotFound, BadRequest
from tmdbapis import NotFound as TMDbNotFound
from ruamel import yaml

@@ -265,8 +265,8 @@ class MetadataFile(DataFile):
raise Failed(f"Config Error: {map_name} type attribute: {dynamic[methods['type']]} requires trakt to be configured")
else:
auto_type = dynamic[methods["type"]].lower()
og_exclude = util.parse("Config", "exclude", dynamic, parent=map_name, methods=methods, datatype="list") if "exclude" in methods else []
include = util.parse("Config", "include", dynamic, parent=map_name, methods=methods, datatype="list") if "include" in methods else []
og_exclude = util.parse("Config", "exclude", dynamic, parent=map_name, methods=methods, datatype="strlist") if "exclude" in methods else []
include = util.parse("Config", "include", dynamic, parent=map_name, methods=methods, datatype="strlist") if "include" in methods else []
if og_exclude and include:
raise Failed(f"Config Error: {map_name} cannot have both include and exclude attributes")
addons = util.parse("Config", "addons", dynamic, parent=map_name, methods=methods, datatype="dictliststr") if "addons" in methods else {}
@@ -534,7 +534,7 @@ class MetadataFile(DataFile):
add_tags.extend(extra)
remove_tags = util.get_list(group[alias[f"{attr}.remove"]]) if f"{attr}.remove" in alias else None
sync_tags = util.get_list(group[alias[f"{attr}.sync"]] if group[alias[f"{attr}.sync"]] else []) if f"{attr}.sync" in alias else None
return self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
return len(self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)) > 0
return False

def set_images(self, obj, group, alias):

@@ -569,7 +569,6 @@ class MetadataFile(DataFile):
methods = {mm.lower(): mm for mm in meta}

updated = False
edits = {}

def add_edit(name, current_item, group=None, alias=None, key=None, value=None, var_type="str"):
if value or name in alias:

@@ -601,14 +600,25 @@ class MetadataFile(DataFile):
else:
final_value = value
if current != str(final_value):
edits[f"{key}.value"] = final_value
edits[f"{key}.locked"] = 1
if key == "title":
current_item.editTitle(final_value)
else:
current_item.editField(key, final_value)
logger.info(f"Detail: {name} updated to {final_value}")
updated = True
except Failed as ee:
logger.error(ee)
else:
logger.error(f"Metadata Error: {name} attribute is blank")

def finish_edit(current_item, description):
if updated:
try:
current_item.saveEdits()
logger.info(f"{description} Details Update Successful")
except BadRequest:
logger.error(f"{description} Details Update Failed")

logger.info("")
logger.separator()
logger.info("")
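add_edit now calls the plexapi edit mixins directly (editTitle for titles, the generic editField for everything else) while the item is in batch-edit mode, and finish_edit commits the staged edits with saveEdits(), catching BadRequest. A simplified, self-contained sketch of that pattern; the movie title, section name, and field values are placeholders:

from plexapi.exceptions import BadRequest
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")
movie = plex.library.section("Movies").get("Some Movie")  # hypothetical title

movie.batchEdits()  # stage edits instead of sending one request per field
updated = False
for field, value in {"title": "Some Movie (Director's Cut)", "studio": "A24"}.items():
    if str(getattr(movie, field, None)) != str(value):
        if field == "title":
            movie.editTitle(value)
        else:
            movie.editField(field, value)  # generic field edit; locks the field
        updated = True

if updated:
    try:
        movie.saveEdits()
        print("Details Update Successful")
    except BadRequest:
        print("Details Update Failed")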
@@ -692,7 +702,7 @@ class MetadataFile(DataFile):
summary = tmdb_item.overview
genres = tmdb_item.genres

edits = {}
item.batchEdits()
add_edit("title", item, meta, methods, value=title)
add_edit("sort_title", item, meta, methods, key="titleSort")
add_edit("user_rating", item, meta, methods, key="userRating", var_type="float")

@@ -705,8 +715,10 @@ class MetadataFile(DataFile):
add_edit("studio", item, meta, methods, value=studio)
add_edit("tagline", item, meta, methods, value=tagline)
add_edit("summary", item, meta, methods, value=summary)
if self.library.edit_item(item, mapping_name, self.library.type, edits):
for tag_edit in util.tags_to_edit[self.library.type]:
if self.edit_tags(tag_edit, item, meta, methods, extra=genres if tag_edit == "genre" else None):
updated = True
finish_edit(item, f"{self.library.type}: {mapping_name}")

if self.library.type in util.advance_tags_to_edit:
advance_edits = {}

@@ -716,22 +728,18 @@ class MetadataFile(DataFile):
if advance_edit in ["metadata_language", "use_original_title"] and self.library.agent not in plex.new_plex_agents:
logger.error(f"Metadata Error: {advance_edit} attribute only works for with the New Plex Movie Agent and New Plex TV Agent")
elif meta[methods[advance_edit]]:
key, options = plex.item_advance_keys[f"item_{advance_edit}"]
ad_key, options = plex.item_advance_keys[f"item_{advance_edit}"]
method_data = str(meta[methods[advance_edit]]).lower()
if method_data not in options:
logger.error(f"Metadata Error: {meta[methods[advance_edit]]} {advance_edit} attribute invalid")
elif key in prefs and getattr(item, key) != options[method_data]:
advance_edits[key] = options[method_data]
elif ad_key in prefs and getattr(item, ad_key) != options[method_data]:
advance_edits[ad_key] = options[method_data]
logger.info(f"Detail: {advance_edit} updated to {method_data}")
else:
logger.error(f"Metadata Error: {advance_edit} attribute is blank")
if self.library.edit_item(item, mapping_name, self.library.type, advance_edits, advanced=True):
updated = True

for tag_edit in util.tags_to_edit[self.library.type]:
if self.edit_tags(tag_edit, item, meta, methods, extra=genres if tag_edit == "genre" else None):
updated = True

logger.info(f"{self.library.type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

self.set_images(item, meta, methods)
@@ -756,12 +764,11 @@ class MetadataFile(DataFile):
logger.error(f"Metadata Error: Season: {season_id} not found")
continue
season_methods = {sm.lower(): sm for sm in season_dict}
edits = {}
season.batchEdits()
add_edit("title", season, season_dict, season_methods)
add_edit("summary", season, season_dict, season_methods)
add_edit("user_rating", season, season_dict, season_methods, key="userRating", var_type="float")
if self.library.edit_item(season, season_id, "Season", edits):
updated = True
finish_edit(season, f"Season: {season_id}")
self.set_images(season, season_dict, season_methods)
logger.info(f"Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@@ -785,7 +792,7 @@ class MetadataFile(DataFile):
logger.error(f"Metadata Error: Episode {episode_str} in Season {season_id} not found")
continue
episode_methods = {em.lower(): em for em in episode_dict}
edits = {}
episode.batchEdits()
add_edit("title", episode, episode_dict, episode_methods)
add_edit("sort_title", episode, episode_dict, episode_methods, key="titleSort")
add_edit("critic_rating", episode, episode_dict, episode_methods, key="rating", var_type="float")

@@ -793,11 +800,10 @@ class MetadataFile(DataFile):
add_edit("user_rating", episode, episode_dict, episode_methods, key="userRating", var_type="float")
add_edit("originally_available", episode, episode_dict, episode_methods, key="originallyAvailableAt", var_type="date")
add_edit("summary", episode, episode_dict, episode_methods)
if self.library.edit_item(episode, f"{episode_str} in Season: {season_id}", "Episode", edits):
updated = True
for tag_edit in ["director", "writer"]:
if self.edit_tags(tag_edit, episode, episode_dict, episode_methods):
updated = True
finish_edit(episode, f"Episode: {episode_str} in Season: {season_id}")
self.set_images(episode, episode_dict, episode_methods)
logger.info(f"Episode {episode_str} in Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
@@ -824,7 +830,7 @@ class MetadataFile(DataFile):
logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found")
continue
episode_methods = {em.lower(): em for em in episode_dict}
edits = {}
episode.batchEdits()
add_edit("title", episode, episode_dict, episode_methods)
add_edit("sort_title", episode, episode_dict, episode_methods, key="titleSort")
add_edit("critic_rating", episode, episode_dict, episode_methods, key="rating", var_type="float")

@@ -832,11 +838,10 @@ class MetadataFile(DataFile):
add_edit("user_rating", episode, episode_dict, episode_methods, key="userRating", var_type="float")
add_edit("originally_available", episode, episode_dict, episode_methods, key="originallyAvailableAt", var_type="date")
add_edit("summary", episode, episode_dict, episode_methods)
if self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits):
updated = True
for tag_edit in ["director", "writer"]:
if self.edit_tags(tag_edit, episode, episode_dict, episode_methods):
updated = True
finish_edit(episode, f"Episode: {episode_str} in Season: {season_id}")
self.set_images(episode, episode_dict, episode_methods)
logger.info(f"Episode S{season_id}E{episode_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@@ -863,7 +868,7 @@ class MetadataFile(DataFile):
continue
if not title:
title = album.title
edits = {}
album.batchEdits()
add_edit("title", album, album_dict, album_methods, value=title)
add_edit("sort_title", album, album_dict, album_methods, key="titleSort")
add_edit("critic_rating", album, album_dict, album_methods, key="rating", var_type="float")

@@ -871,11 +876,10 @@ class MetadataFile(DataFile):
add_edit("originally_available", album, album_dict, album_methods, key="originallyAvailableAt", var_type="date")
add_edit("record_label", album, album_dict, album_methods, key="studio")
add_edit("summary", album, album_dict, album_methods)
if self.library.edit_item(album, title, "Album", edits):
updated = True
for tag_edit in ["genre", "style", "mood", "collection", "label"]:
if self.edit_tags(tag_edit, album, album_dict, album_methods):
updated = True
finish_edit(album, f"Album: {title}")
self.set_images(album, album_dict, album_methods)
logger.info(f"Album: {title} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
@@ -906,16 +910,15 @@ class MetadataFile(DataFile):

if not title:
title = track.title
edits = {}
track.batchEdits()
add_edit("title", track, track_dict, track_methods, value=title)
add_edit("user_rating", track, track_dict, track_methods, key="userRating", var_type="float")
add_edit("track", track, track_dict, track_methods, key="index", var_type="int")
add_edit("disc", track, track_dict, track_methods, key="parentIndex", var_type="int")
add_edit("original_artist", track, track_dict, track_methods, key="originalTitle")
if self.library.edit_item(track, title, "Track", edits):
updated = True
if self.edit_tags("mood", track, track_dict, track_methods):
updated = True
finish_edit(track, f"Track: {title}")
logger.info(f"Track: {track_num} on Album: {title} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

if "f1_season" in methods and self.library.is_show:

@@ -964,19 +967,17 @@ class MetadataFile(DataFile):
race = race_lookup[season.seasonNumber]
title = race.format_name(round_prefix, shorten_gp)
updated = False
edits = {}
season.batchEdits()
add_edit("title", season, value=title)
if self.library.edit_item(season, title, "Season", edits):
updated = True
finish_edit(season, f"Season: {title}")
logger.info(f"Race {season.seasonNumber} of F1 Season {f1_season}: Details Update {'Complete' if updated else 'Not Needed'}")
for episode in season.episodes():
if len(episode.locations) > 0:
ep_title, session_date = race.session_info(episode.locations[0], sprint_weekend)
edits = {}
episode.batchEdits()
add_edit("title", episode, value=ep_title)
add_edit("originally_available", episode, key="originallyAvailableAt", var_type="date", value=session_date)
if self.library.edit_item(episode, f"{season.seasonNumber} Episode: {episode.episodeNumber}", "Season", edits):
updated = True
finish_edit(episode, f"Season: {season.seasonNumber} Episode: {episode.episodeNumber}")
logger.info(f"Session {episode.title}: Details Update {'Complete' if updated else 'Not Needed'}")
else:
logger.warning(f"Ergast Error: No Round: {season.seasonNumber} for Season {f1_season}")
@@ -515,7 +515,7 @@ class Plex(Library):
def collection_mode_query(self, collection, data):
if int(collection.collectionMode) not in collection_mode_keys or collection_mode_keys[int(collection.collectionMode)] != data:
collection.modeUpdate(mode=data)
logger.info(f"Detail: collection_order updated Collection Order to {data}")
logger.info(f"Collection Mode | data")

@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
def collection_order_query(self, collection, data):

@@ -830,8 +830,9 @@ class Plex(Library):
logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed")
return False

def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None, do_print=True):
display = ""
final = ""
key = builder.filter_translation[attr] if attr in builder.filter_translation else attr
attr_display = attr.replace("_", " ").title()
attr_call = attr_display.replace(" ", "")

@@ -852,9 +853,10 @@ class Plex(Library):
if _remove:
self.query_data(getattr(obj, f"remove{attr_call}"), _remove)
display += f"-{', -'.join(_remove)}"
if len(display) > 0:
logger.info(f"{obj.title[:25]:<25} | {attr_display} | {display}")
return len(display) > 0
final = f"{obj.title[:25]:<25} | {attr_display} | {display}" if display else display
if do_print:
logger.info(final)
return final

def find_assets(self, item, name=None, upload=True, overlay=None, folders=None, create=None):
if isinstance(item, (Movie, Artist, Show)):
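edit_tags now returns the formatted display line (an empty string when nothing changed) instead of a bool, and do_print=False lets callers collect several of those lines into one batched log message. A standalone sketch of the new contract; the helper name and sample values are illustrative, not PMM's actual wrapper:

def edit_tags_display(title, attr_display, display, do_print=True):
    # Mirror of the new return behavior: build the line, optionally log it, return it.
    final = f"{title[:25]:<25} | {attr_display} | {display}" if display else display
    if do_print and final:
        print(final)
    return final

batch_display = "Batch Edits"
for line in (
    edit_tags_display("Halloween", "Label", "+Spooky", do_print=False),
    edit_tags_display("Halloween", "Genre", "+Horror, -Comedy", do_print=False),
):
    if line:                      # skip attributes where no tags changed
        batch_display += f"\n{line}"
print(batch_display)              # one message covering all tag edits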
@@ -426,7 +426,7 @@ def parse(error, attribute, data, datatype=None, methods=None, parent=None, defa
options = [o for o in translation]
value = data[methods[attribute]] if methods and attribute in methods else data

if datatype in ["list", "commalist"]:
if datatype in ["list", "commalist", "strlist"]:
final_list = []
if value:
if datatype == "commalist":

@@ -435,8 +435,8 @@ def parse(error, attribute, data, datatype=None, methods=None, parent=None, defa
value = [value]
for v in value:
if v:
if options is None or (options and v in options):
final_list.append(v)
if options is None or (options and (v in options or (datatype == "strlist" and str(v) in options))):
final_list.append(str(v) if datatype == "strlist" else v)
elif options:
raise Failed(f"{error} Error: {v} is invalid options are: {options}")
return final_list
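The new "strlist" datatype coerces each parsed value to a string before matching it against the options list, presumably so YAML scalars that load as ints (years and similar) still match string-valued include/exclude options. A reduced sketch of just that branch; the helper name and sample data are illustrative, not the full util.parse signature:

def parse_list(value, datatype, options=None):
    final_list = []
    for v in (value if isinstance(value, list) else [value]):
        if not v:
            continue
        if options is None or v in options or (datatype == "strlist" and str(v) in options):
            # strlist coerces everything to str so 1999 and "1999" compare equal
            final_list.append(str(v) if datatype == "strlist" else v)
        elif options:
            raise ValueError(f"{v} is invalid options are: {options}")
    return final_list

print(parse_list([1999, "2000"], "strlist", options=["1999", "2000"]))  # ['1999', '2000']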
@@ -116,7 +116,7 @@ with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) a
version = util.parse_version(line)
break

plexapi.BASE_HEADERS['X-Plex-Client-Identifier'] = "Plex-Meta-Manager"
plexapi.BASE_HEADERS["X-Plex-Client-Identifier"] = "Plex-Meta-Manager"

def start(attrs):
logger.add_main_handler()

@@ -186,7 +186,7 @@ def start(attrs):
logger.critical(e)
logger.info("")
end_time = datetime.now()
run_time = str(end_time - start_time).split('.')[0]
run_time = str(end_time - start_time).split(".")[0]
if config:
try:
config.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)

@@ -438,12 +438,12 @@ def library_operations(config, library):
if library.update_blank_track_titles:
tracks = library.get_all(collection_level="track")
num_edited = 0
for i, item in enumerate(tracks, 1):
logger.ghost(f"Processing Track: {i}/{len(tracks)} {item.title}")
if not item.title and item.titleSort:
library.edit_query(item, {"title.locked": 1, "title.value": item.titleSort})
for i, track in enumerate(tracks, 1):
logger.ghost(f"Processing Track: {i}/{len(tracks)} {track.title}")
if not track.title and track.titleSort:
track.editTitle(track.titleSort)
num_edited += 1
logger.info(f"Track: {item.titleSort} was updated with sort title")
logger.info(f"Track: {track.titleSort} was updated with sort title")
logger.info(f"{len(tracks)} Tracks Processed; {num_edited} Blank Track Titles Updated")

tmdb_collections = {}
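The blank-track-title operation now calls editTitle on each track directly instead of building an edit_query payload. A minimal sketch; the connection details and the "Music" section name are placeholders:

from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")
music = plex.library.section("Music")

num_edited = 0
for track in music.searchTracks():
    if not track.title and track.titleSort:
        track.editTitle(track.titleSort)  # copy the sort title into the blank title field
        num_edited += 1
print(f"{num_edited} Blank Track Titles Updated")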
@@ -469,6 +469,9 @@ def library_operations(config, library):
library.find_assets(item)
tmdb_id, tvdb_id, imdb_id = library.get_ids(item)

item.batchEdits()
batch_display = "Batch Edits"

if library.mass_trakt_rating_update:
try:
if library.is_movie and tmdb_id in trakt_ratings:

@@ -478,7 +481,7 @@ def library_operations(config, library):
else:
raise Failed
if str(item.userRating) != str(new_rating):
library.edit_query(item, {"userRating.value": new_rating, "userRating.locked": 1})
library.query_data(item.rate, new_rating)
logger.info(f"{item.title[:25]:<25} | User Rating | {new_rating}")
except Failed:
pass

@@ -487,7 +490,7 @@ def library_operations(config, library):
try:
parental_guide = config.IMDb.parental_guide(imdb_id)
labels = [f"{k.capitalize()}:{v}" for k, v in parental_guide.items() if library.mass_imdb_parental_labels == "with_none" or v != "None"]
library.edit_tags("label", item, add_tags=labels)
batch_display += f"\n{library.edit_tags('label', item, add_tags=labels)}"
except Failed:
pass

@@ -498,19 +501,15 @@ def library_operations(config, library):
radarr_adds.append((tmdb_id, path))
if library.Sonarr and library.sonarr_add_all_existing and tvdb_id:
path = path.replace(library.Sonarr.plex_path, library.Sonarr.sonarr_path)
path = path[:-1] if path.endswith(('/', '\\')) else path
path = path[:-1] if path.endswith(("/", "\\")) else path
sonarr_adds.append((tvdb_id, path))

tmdb_item = None
if library.tmdb_collections or library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" \
or library.mass_critic_rating_update == "tmdb" or library.mass_originally_available_update == "tmdb" \
or library.mass_content_rating_update == "tmdb":
if library.tmdb_collections or any([o == "tmdb" for o in library.meta_operations]):
tmdb_item = config.TMDb.get_item(item, tmdb_id, tvdb_id, imdb_id, is_movie=library.is_movie)

omdb_item = None
if library.mass_genre_update == "omdb" or library.mass_audience_rating_update == "omdb" \
or library.mass_critic_rating_update == "omdb" or library.mass_content_rating_update == "omdb" \
or library.mass_originally_available_update == "omdb":
if any([o == "omdb" for o in library.meta_operations]):
if config.OMDb.limit is False:
if tmdb_id and not imdb_id:
imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
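Grouping the five mass_* settings into library.meta_operations (added in the Library hunk earlier) lets each external-source check collapse to a single any() test. A tiny illustration with placeholder setting values:

meta_operations = ["tmdb", None, "omdb", None, "mdb_commonsense"]  # stand-ins for the five mass_* settings

need_tmdb = any([o == "tmdb" for o in meta_operations])               # True
need_tvdb = any([o == "tvdb" for o in meta_operations])               # False
need_mdb = any([o and o.startswith("mdb") for o in meta_operations])  # True
print(need_tmdb, need_tvdb, need_mdb)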
@@ -528,7 +527,7 @@ def library_operations(config, library):
logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")

tvdb_item = None
if library.mass_genre_update == "tvdb" or library.mass_originally_available_update == "tvdb":
if any([o == "tvdb" for o in library.meta_operations]):
if tvdb_id:
try:
tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie)

@@ -538,7 +537,7 @@ def library_operations(config, library):
logger.info(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}")

anidb_item = None
if library.mass_genre_update == "anidb":
if any([o == "anidb" for o in library.meta_operations]):
if item.ratingKey in reverse_anidb:
anidb_id = reverse_anidb[item.ratingKey]
elif tvdb_id in config.Convert._tvdb_to_anidb:

@@ -555,8 +554,7 @@ def library_operations(config, library):
logger.error(str(e))

mdb_item = None
if library.mass_audience_rating_update in util.mdb_types or library.mass_critic_rating_update in util.mdb_types \
or library.mass_content_rating_update in ["mdb", "mdb_commonsense"] or library.mass_originally_available_update == "mdb":
if any([o and o.startswith("mdb") for o in library.meta_operations]):
if config.Mdblist.limit is False:
if tmdb_id and not imdb_id:
imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)

@@ -607,7 +605,6 @@ def library_operations(config, library):
raise Failed

if library.mass_genre_update or library.genre_mapper:
try:
new_genres = []
if library.mass_genre_update:
if tmdb_item and library.mass_genre_update == "tmdb":

@@ -633,29 +630,23 @@ def library_operations(config, library):
else:
mapped_genres.append(genre)
new_genres = mapped_genres
library.edit_tags("genre", item, sync_tags=new_genres)
except Failed:
pass
batch_display += f"\n{library.edit_tags('genre', item, sync_tags=new_genres)}"

if library.mass_audience_rating_update:
try:
new_rating = get_rating(library.mass_audience_rating_update)
if new_rating is None:
logger.info(f"{item.title[:25]:<25} | No Rating Found")
elif str(item.audienceRating) != str(new_rating):
library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
logger.info(f"{item.title[:25]:<25} | Audience Rating | {new_rating}")
except Failed:
pass
item.editField("audienceRating", new_rating)
batch_display += f"\n{item.title[:25]:<25} | Audience Rating | {new_rating}"

if library.mass_critic_rating_update:
try:
new_rating = get_rating(library.mass_critic_rating_update)
if new_rating is None:
logger.info(f"{item.title[:25]:<25} | No Rating Found")
elif str(item.rating) != str(new_rating):
library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
logger.info(f"{item.title[:25]:<25} | Critic Rating | {new_rating}")
except Failed:
pass
item.editField("rating", new_rating)
batch_display += f"{item.title[:25]:<25} | Critic Rating | {new_rating}"
if library.mass_content_rating_update or library.content_rating_mapper:
try:
new_rating = None
@@ -678,8 +669,8 @@ def library_operations(config, library):
if new_rating in library.content_rating_mapper:
new_rating = library.content_rating_mapper[new_rating]
if str(item.contentRating) != str(new_rating):
library.edit_query(item, {"contentRating.value": new_rating, "contentRating.locked": 1})
logger.info(f"{item.title[:25]:<25} | Content Rating | {new_rating}")
item.editContentRating(new_rating)
batch_display += f"\n{item.title[:25]:<25} | Content Rating | {new_rating}"
except Failed:
pass
if library.mass_originally_available_update:

@@ -698,12 +689,14 @@ def library_operations(config, library):
raise Failed
if new_date is None:
logger.info(f"{item.title[:25]:<25} | No Originally Available Date Found")
elif str(item.rating) != str(new_date):
library.edit_query(item, {"originallyAvailableAt.value": new_date.strftime("%Y-%m-%d"), "originallyAvailableAt.locked": 1})
logger.info(f"{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}")
elif str(item.originallyAvailableAt) != str(new_date):
item.editOriginallyAvailable(new_date)
batch_display += f"\n{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}"
except Failed:
pass

item.saveEdits()

if library.Radarr and library.radarr_add_all_existing:
try:
library.Radarr.add_tmdb(radarr_adds)
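Each item in the library operations is now put into batch-edit mode up front, the individual mass updates stage field and tag edits (editField, editContentRating, editOriginallyAvailable, edit_tags), and the single saveEdits() above commits them. A condensed sketch of that per-item flow; the section name, ratings, and date are placeholder values:

from datetime import datetime
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")

for item in plex.library.section("Movies").all():
    item.batchEdits()                                # stage everything locally
    batch_display = "Batch Edits"

    new_rating = 8.4                                 # placeholder for an externally sourced rating
    if str(item.audienceRating) != str(new_rating):
        item.editField("audienceRating", new_rating)
        batch_display += f"\n{item.title[:25]:<25} | Audience Rating | {new_rating}"

    new_date = datetime(2021, 10, 15)                # placeholder release date
    if str(item.originallyAvailableAt) != str(new_date):
        item.editOriginallyAvailable(new_date)
        batch_display += f"\n{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}"

    item.saveEdits()                                 # one request per item applies all staged edits
    print(batch_display)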
@@ -1,4 +1,4 @@
PlexAPI==4.9.2
PlexAPI==4.10.1
tmdbapis==1.0.3
arrapi==1.3.1
lxml==4.8.0