mirror of https://github.com/meisnate12/Plex-Meta-Manager
synced 2024-11-22 12:33:10 +00:00

[30] updated plexapi to 4.10.1

This commit is contained in:
parent 3b3343fe07
commit 87f48855e6

8 changed files with 175 additions and 173 deletions
VERSION
@@ -1 +1 @@
-1.16.2-develop29
+1.16.2-develop30

@@ -2276,8 +2276,8 @@ class CollectionBuilder:
 tmdb_id = self.config.Convert.tvdb_to_tmdb(self.library.show_rating_key_map[item.ratingKey])
 names = {s.season_number: s.name for s in self.config.TMDb.get_show(tmdb_id).seasons}
 for season in self.library.query(item.seasons):
-if season.index in names:
-self.library.edit_query(season, {"title.locked": 1, "title.value": names[season.index]})
+if season.index in names and season.title != names[season.index]:
+season.editTitle(names[season.index])
 except Failed as e:
 logger.error(e)

@@ -2348,7 +2348,6 @@ class CollectionBuilder:
 self.library.update_smart_collection(self.obj, self.smart_url)
 logger.info(f"Detail: Smart Filter updated to {self.smart_url}")

-edits = {}
 def get_summary(summary_method, summaries):
 logger.info(f"Detail: {summary_method} will update {self.Type} Summary")
 return summaries[summary_method]

@@ -2374,67 +2373,73 @@ class CollectionBuilder:
 elif "tvdb_show_details" in self.summaries: summary = get_summary("tvdb_show_details", self.summaries)
 elif "tmdb_show_details" in self.summaries: summary = get_summary("tmdb_show_details", self.summaries)
 else: summary = None
-if summary:
-if str(summary) != str(self.obj.summary):
-edits["summary.value"] = summary
-edits["summary.locked"] = 1

-if "sort_title" in self.details:
-if str(self.details["sort_title"]) != str(self.obj.titleSort):
-edits["titleSort.value"] = self.details["sort_title"]
-edits["titleSort.locked"] = 1
-logger.info(f"Detail: sort_title will update Collection Sort Title to {self.details['sort_title']}")
+if self.playlist:
+if summary and str(summary) != str(self.obj.summary):
+try:
+self.obj.edit(summary=str(summary))
+logger.info(f"Summary | {summary:<25}")
+logger.info("Details: have been updated")
+except NotFound:
+logger.error("Details: Failed to Update Please delete the collection and run again")
+logger.info("")
+else:
+self.obj.batchEdits()

-if "content_rating" in self.details:
-if str(self.details["content_rating"]) != str(self.obj.contentRating):
-edits["contentRating.value"] = self.details["content_rating"]
-edits["contentRating.locked"] = 1
-logger.info(f"Detail: content_rating will update Collection Content Rating to {self.details['content_rating']}")
+batch_display = "Collection Metadata Edits"
+if summary and str(summary) != str(self.obj.summary):
+self.obj.editSummary(summary)
+batch_display += f"\nSummary | {summary:<25}"

-if len(edits) > 0:
-logger.debug(edits)
-try:
-if self.playlist:
-self.obj.edit(summary=str(edits["summary.value"]))
-else:
-self.library.edit_query(self.obj, edits)
-logger.info("Details: have been updated")
-except NotFound:
-logger.error("Details: Failed to Update Please delete the collection and run again")
-logger.info("")
+if "sort_title" in self.details and str(self.details["sort_title"]) != str(self.obj.titleSort):
+self.obj.editSortTitle(self.details["sort_title"])
+batch_display += f"\nSort Title | {self.details['sort_title']}"

-if "collection_mode" in self.details:
-self.library.collection_mode_query(self.obj, self.details["collection_mode"])
+if "content_rating" in self.details and str(self.details["content_rating"]) != str(self.obj.contentRating):
+self.obj.editContentRating(self.details["content_rating"])
+batch_display += f"\nContent Rating | {self.details['content_rating']}"

-if "collection_order" in self.details:
-if int(self.obj.collectionSort) not in plex.collection_order_keys\
-or plex.collection_order_keys[int(self.obj.collectionSort)] != self.details["collection_order"]:
-self.library.collection_order_query(self.obj, self.details["collection_order"])
-logger.info(f"Detail: collection_order updated Collection Order to {self.details['collection_order']}")
+add_tags = self.details["label"] if "label" in self.details else None
+remove_tags = self.details["label.remove"] if "label.remove" in self.details else None
+sync_tags = self.details["label.sync"] if "label.sync" in self.details else None
+batch_display += f"\n{self.library.edit_tags('label', self.obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags, do_print=False)[28:]}"

-if "visible_library" in self.details or "visible_home" in self.details or "visible_shared" in self.details:
-visibility = self.library.collection_visibility(self.obj)
-visible_library = None
-visible_home = None
-visible_shared = None
+logger.info(batch_display)
+if len(batch_display) > 25:
+try:
+self.obj.saveEdits()
+logger.info("Details: have been updated")
+except NotFound:
+logger.error("Details: Failed to Update Please delete the collection and run again")
+logger.info("")

-if "visible_library" in self.details and self.details["visible_library"] != visibility["library"]:
-visible_library = self.details["visible_library"]
+if "collection_mode" in self.details:
+self.library.collection_mode_query(self.obj, self.details["collection_mode"])

-if "visible_home" in self.details and self.details["visible_home"] != visibility["home"]:
-visible_home = self.details["visible_home"]
+if "collection_order" in self.details:
+if int(self.obj.collectionSort) not in plex.collection_order_keys\
+or plex.collection_order_keys[int(self.obj.collectionSort)] != self.details["collection_order"]:
+self.library.collection_order_query(self.obj, self.details["collection_order"])
+logger.info(f"Collection Order | {self.details['collection_order']}")

-if "visible_shared" in self.details and self.details["visible_shared"] != visibility["shared"]:
-visible_shared = self.details["visible_shared"]
+if "visible_library" in self.details or "visible_home" in self.details or "visible_shared" in self.details:
+visibility = self.library.collection_visibility(self.obj)
+visible_library = None
+visible_home = None
+visible_shared = None

-if visible_library is not None or visible_home is not None or visible_shared is not None:
-self.library.collection_visibility_update(self.obj, visibility=visibility, library=visible_library, home=visible_home, shared=visible_shared)
-logger.info("Detail: Collection visibility updated")
+if "visible_library" in self.details and self.details["visible_library"] != visibility["library"]:
+visible_library = self.details["visible_library"]

-add_tags = self.details["label"] if "label" in self.details else None
-remove_tags = self.details["label.remove"] if "label.remove" in self.details else None
-sync_tags = self.details["label.sync"] if "label.sync" in self.details else None
-self.library.edit_tags("label", self.obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
+if "visible_home" in self.details and self.details["visible_home"] != visibility["home"]:
+visible_home = self.details["visible_home"]
+if "visible_shared" in self.details and self.details["visible_shared"] != visibility["shared"]:
+visible_shared = self.details["visible_shared"]

+if visible_library is not None or visible_home is not None or visible_shared is not None:
+self.library.collection_visibility_update(self.obj, visibility=visibility, library=visible_library, home=visible_home, shared=visible_shared)
+logger.info("Collection Visibility Updated")

 poster_image = None
 background_image = None

@@ -100,6 +100,7 @@ class Library(ABC):
 self.library_operation = True if self.items_library_operation or self.delete_unmanaged_collections or self.delete_collections_with_less \
 or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.mass_collection_mode \
 or self.genre_collections or self.show_unmanaged or self.metadata_backup or self.update_blank_track_titles else False
+self.meta_operations = [self.mass_genre_update, self.mass_audience_rating_update, self.mass_critic_rating_update, self.mass_content_rating_update, self.mass_originally_available_update]

 if self.asset_directory:
 logger.info("")

@@ -235,7 +236,7 @@ class Library(ABC):
 pass

 @abstractmethod
-def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
+def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None, do_print=True):
 pass

 @abstractmethod

@@ -2,7 +2,7 @@ import operator, os, re
 from datetime import datetime
 from modules import plex, ergast, util
 from modules.util import Failed, ImageData
-from plexapi.exceptions import NotFound
+from plexapi.exceptions import NotFound, BadRequest
 from tmdbapis import NotFound as TMDbNotFound
 from ruamel import yaml

@@ -265,8 +265,8 @@ class MetadataFile(DataFile):
 raise Failed(f"Config Error: {map_name} type attribute: {dynamic[methods['type']]} requires trakt to be configured")
 else:
 auto_type = dynamic[methods["type"]].lower()
-og_exclude = util.parse("Config", "exclude", dynamic, parent=map_name, methods=methods, datatype="list") if "exclude" in methods else []
-include = util.parse("Config", "include", dynamic, parent=map_name, methods=methods, datatype="list") if "include" in methods else []
+og_exclude = util.parse("Config", "exclude", dynamic, parent=map_name, methods=methods, datatype="strlist") if "exclude" in methods else []
+include = util.parse("Config", "include", dynamic, parent=map_name, methods=methods, datatype="strlist") if "include" in methods else []
 if og_exclude and include:
 raise Failed(f"Config Error: {map_name} cannot have both include and exclude attributes")
 addons = util.parse("Config", "addons", dynamic, parent=map_name, methods=methods, datatype="dictliststr") if "addons" in methods else {}

@@ -534,7 +534,7 @@ class MetadataFile(DataFile):
 add_tags.extend(extra)
 remove_tags = util.get_list(group[alias[f"{attr}.remove"]]) if f"{attr}.remove" in alias else None
 sync_tags = util.get_list(group[alias[f"{attr}.sync"]] if group[alias[f"{attr}.sync"]] else []) if f"{attr}.sync" in alias else None
-return self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)
+return len(self.library.edit_tags(attr, obj, add_tags=add_tags, remove_tags=remove_tags, sync_tags=sync_tags)) > 0
 return False

 def set_images(self, obj, group, alias):

@@ -569,7 +569,6 @@ class MetadataFile(DataFile):
 methods = {mm.lower(): mm for mm in meta}

 updated = False
-edits = {}

 def add_edit(name, current_item, group=None, alias=None, key=None, value=None, var_type="str"):
 if value or name in alias:

@@ -601,14 +600,25 @@ class MetadataFile(DataFile):
 else:
 final_value = value
 if current != str(final_value):
-edits[f"{key}.value"] = final_value
-edits[f"{key}.locked"] = 1
+if key == "title":
+current_item.editTitle(final_value)
+else:
+current_item.editField(key, final_value)
 logger.info(f"Detail: {name} updated to {final_value}")
+updated = True
 except Failed as ee:
 logger.error(ee)
 else:
 logger.error(f"Metadata Error: {name} attribute is blank")

+def finish_edit(current_item, description):
+if updated:
+try:
+current_item.saveEdits()
+logger.info(f"{description} Details Update Successful")
+except BadRequest:
+logger.error(f"{description} Details Update Failed")

 logger.info("")
 logger.separator()
 logger.info("")

@@ -692,7 +702,7 @@ class MetadataFile(DataFile):
 summary = tmdb_item.overview
 genres = tmdb_item.genres

-edits = {}
+item.batchEdits()
 add_edit("title", item, meta, methods, value=title)
 add_edit("sort_title", item, meta, methods, key="titleSort")
 add_edit("user_rating", item, meta, methods, key="userRating", var_type="float")

@@ -705,8 +715,10 @@ class MetadataFile(DataFile):
 add_edit("studio", item, meta, methods, value=studio)
 add_edit("tagline", item, meta, methods, value=tagline)
 add_edit("summary", item, meta, methods, value=summary)
-if self.library.edit_item(item, mapping_name, self.library.type, edits):
-updated = True
+for tag_edit in util.tags_to_edit[self.library.type]:
+if self.edit_tags(tag_edit, item, meta, methods, extra=genres if tag_edit == "genre" else None):
+updated = True
+finish_edit(item, f"{self.library.type}: {mapping_name}")

 if self.library.type in util.advance_tags_to_edit:
 advance_edits = {}

@@ -716,22 +728,18 @@ class MetadataFile(DataFile):
 if advance_edit in ["metadata_language", "use_original_title"] and self.library.agent not in plex.new_plex_agents:
 logger.error(f"Metadata Error: {advance_edit} attribute only works for with the New Plex Movie Agent and New Plex TV Agent")
 elif meta[methods[advance_edit]]:
-key, options = plex.item_advance_keys[f"item_{advance_edit}"]
+ad_key, options = plex.item_advance_keys[f"item_{advance_edit}"]
 method_data = str(meta[methods[advance_edit]]).lower()
 if method_data not in options:
 logger.error(f"Metadata Error: {meta[methods[advance_edit]]} {advance_edit} attribute invalid")
-elif key in prefs and getattr(item, key) != options[method_data]:
-advance_edits[key] = options[method_data]
+elif ad_key in prefs and getattr(item, ad_key) != options[method_data]:
+advance_edits[ad_key] = options[method_data]
 logger.info(f"Detail: {advance_edit} updated to {method_data}")
 else:
 logger.error(f"Metadata Error: {advance_edit} attribute is blank")
 if self.library.edit_item(item, mapping_name, self.library.type, advance_edits, advanced=True):
 updated = True

-for tag_edit in util.tags_to_edit[self.library.type]:
-if self.edit_tags(tag_edit, item, meta, methods, extra=genres if tag_edit == "genre" else None):
-updated = True

 logger.info(f"{self.library.type}: {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

 self.set_images(item, meta, methods)

@@ -756,12 +764,11 @@ class MetadataFile(DataFile):
 logger.error(f"Metadata Error: Season: {season_id} not found")
 continue
 season_methods = {sm.lower(): sm for sm in season_dict}
-edits = {}
+season.batchEdits()
 add_edit("title", season, season_dict, season_methods)
 add_edit("summary", season, season_dict, season_methods)
 add_edit("user_rating", season, season_dict, season_methods, key="userRating", var_type="float")
-if self.library.edit_item(season, season_id, "Season", edits):
-updated = True
+finish_edit(season, f"Season: {season_id}")
 self.set_images(season, season_dict, season_methods)
 logger.info(f"Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@@ -785,7 +792,7 @@ class MetadataFile(DataFile):
 logger.error(f"Metadata Error: Episode {episode_str} in Season {season_id} not found")
 continue
 episode_methods = {em.lower(): em for em in episode_dict}
-edits = {}
+episode.batchEdits()
 add_edit("title", episode, episode_dict, episode_methods)
 add_edit("sort_title", episode, episode_dict, episode_methods, key="titleSort")
 add_edit("critic_rating", episode, episode_dict, episode_methods, key="rating", var_type="float")

@@ -793,11 +800,10 @@ class MetadataFile(DataFile):
 add_edit("user_rating", episode, episode_dict, episode_methods, key="userRating", var_type="float")
 add_edit("originally_available", episode, episode_dict, episode_methods, key="originallyAvailableAt", var_type="date")
 add_edit("summary", episode, episode_dict, episode_methods)
-if self.library.edit_item(episode, f"{episode_str} in Season: {season_id}", "Episode", edits):
-updated = True
 for tag_edit in ["director", "writer"]:
 if self.edit_tags(tag_edit, episode, episode_dict, episode_methods):
 updated = True
+finish_edit(episode, f"Episode: {episode_str} in Season: {season_id}")
 self.set_images(episode, episode_dict, episode_methods)
 logger.info(f"Episode {episode_str} in Season {season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@@ -824,7 +830,7 @@ class MetadataFile(DataFile):
 logger.error(f"Metadata Error: episode {episode_id} of season {season_id} not found")
 continue
 episode_methods = {em.lower(): em for em in episode_dict}
-edits = {}
+episode.batchEdits()
 add_edit("title", episode, episode_dict, episode_methods)
 add_edit("sort_title", episode, episode_dict, episode_methods, key="titleSort")
 add_edit("critic_rating", episode, episode_dict, episode_methods, key="rating", var_type="float")

@@ -832,11 +838,10 @@ class MetadataFile(DataFile):
 add_edit("user_rating", episode, episode_dict, episode_methods, key="userRating", var_type="float")
 add_edit("originally_available", episode, episode_dict, episode_methods, key="originallyAvailableAt", var_type="date")
 add_edit("summary", episode, episode_dict, episode_methods)
-if self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits):
-updated = True
 for tag_edit in ["director", "writer"]:
 if self.edit_tags(tag_edit, episode, episode_dict, episode_methods):
 updated = True
+finish_edit(episode, f"Episode: {episode_str} in Season: {season_id}")
 self.set_images(episode, episode_dict, episode_methods)
 logger.info(f"Episode S{season_id}E{episode_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@@ -863,7 +868,7 @@ class MetadataFile(DataFile):
 continue
 if not title:
 title = album.title
-edits = {}
+album.batchEdits()
 add_edit("title", album, album_dict, album_methods, value=title)
 add_edit("sort_title", album, album_dict, album_methods, key="titleSort")
 add_edit("critic_rating", album, album_dict, album_methods, key="rating", var_type="float")

@@ -871,11 +876,10 @@ class MetadataFile(DataFile):
 add_edit("originally_available", album, album_dict, album_methods, key="originallyAvailableAt", var_type="date")
 add_edit("record_label", album, album_dict, album_methods, key="studio")
 add_edit("summary", album, album_dict, album_methods)
-if self.library.edit_item(album, title, "Album", edits):
-updated = True
 for tag_edit in ["genre", "style", "mood", "collection", "label"]:
 if self.edit_tags(tag_edit, album, album_dict, album_methods):
 updated = True
+finish_edit(album, f"Album: {title}")
 self.set_images(album, album_dict, album_methods)
 logger.info(f"Album: {title} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

@@ -906,16 +910,15 @@ class MetadataFile(DataFile):

 if not title:
 title = track.title
-edits = {}
+track.batchEdits()
 add_edit("title", track, track_dict, track_methods, value=title)
 add_edit("user_rating", track, track_dict, track_methods, key="userRating", var_type="float")
 add_edit("track", track, track_dict, track_methods, key="index", var_type="int")
 add_edit("disc", track, track_dict, track_methods, key="parentIndex", var_type="int")
 add_edit("original_artist", track, track_dict, track_methods, key="originalTitle")
-if self.library.edit_item(track, title, "Track", edits):
-updated = True
 if self.edit_tags("mood", track, track_dict, track_methods):
 updated = True
+finish_edit(track, f"Track: {title}")
 logger.info(f"Track: {track_num} on Album: {title} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")

 if "f1_season" in methods and self.library.is_show:

@@ -964,19 +967,17 @@ class MetadataFile(DataFile):
 race = race_lookup[season.seasonNumber]
 title = race.format_name(round_prefix, shorten_gp)
 updated = False
-edits = {}
+season.batchEdits()
 add_edit("title", season, value=title)
-if self.library.edit_item(season, title, "Season", edits):
-updated = True
+finish_edit(season, f"Season: {title}")
 logger.info(f"Race {season.seasonNumber} of F1 Season {f1_season}: Details Update {'Complete' if updated else 'Not Needed'}")
 for episode in season.episodes():
 if len(episode.locations) > 0:
 ep_title, session_date = race.session_info(episode.locations[0], sprint_weekend)
-edits = {}
+episode.batchEdits()
 add_edit("title", episode, value=ep_title)
 add_edit("originally_available", episode, key="originallyAvailableAt", var_type="date", value=session_date)
-if self.library.edit_item(episode, f"{season.seasonNumber} Episode: {episode.episodeNumber}", "Season", edits):
-updated = True
+finish_edit(episode, f"Season: {season.seasonNumber} Episode: {episode.episodeNumber}")
 logger.info(f"Session {episode.title}: Details Update {'Complete' if updated else 'Not Needed'}")
 else:
 logger.warning(f"Ergast Error: No Round: {season.seasonNumber} for Season {f1_season}")

@@ -515,7 +515,7 @@ class Plex(Library):
 def collection_mode_query(self, collection, data):
 if int(collection.collectionMode) not in collection_mode_keys or collection_mode_keys[int(collection.collectionMode)] != data:
 collection.modeUpdate(mode=data)
-logger.info(f"Detail: collection_order updated Collection Order to {data}")
+logger.info(f"Collection Mode | data")

 @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
 def collection_order_query(self, collection, data):

@@ -830,8 +830,9 @@ class Plex(Library):
 logger.error(f"{item_type}: {name}{' Advanced' if advanced else ''} Details Update Failed")
 return False

-def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
+def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None, do_print=True):
 display = ""
+final = ""
 key = builder.filter_translation[attr] if attr in builder.filter_translation else attr
 attr_display = attr.replace("_", " ").title()
 attr_call = attr_display.replace(" ", "")

@@ -852,9 +853,10 @@ class Plex(Library):
 if _remove:
 self.query_data(getattr(obj, f"remove{attr_call}"), _remove)
 display += f"-{', -'.join(_remove)}"
-if len(display) > 0:
-logger.info(f"{obj.title[:25]:<25} | {attr_display} | {display}")
-return len(display) > 0
+final = f"{obj.title[:25]:<25} | {attr_display} | {display}" if display else display
+if do_print:
+logger.info(final)
+return final

 def find_assets(self, item, name=None, upload=True, overlay=None, folders=None, create=None):
 if isinstance(item, (Movie, Artist, Show)):

@@ -426,7 +426,7 @@ def parse(error, attribute, data, datatype=None, methods=None, parent=None, defa
 options = [o for o in translation]
 value = data[methods[attribute]] if methods and attribute in methods else data

-if datatype in ["list", "commalist"]:
+if datatype in ["list", "commalist", "strlist"]:
 final_list = []
 if value:
 if datatype == "commalist":

@@ -435,8 +435,8 @@ def parse(error, attribute, data, datatype=None, methods=None, parent=None, defa
 value = [value]
 for v in value:
 if v:
-if options is None or (options and v in options):
-final_list.append(v)
+if options is None or (options and (v in options or (datatype == "strlist" and str(v) in options))):
+final_list.append(str(v) if datatype == "strlist" else v)
 elif options:
 raise Failed(f"{error} Error: {v} is invalid options are: {options}")
 return final_list

@@ -116,7 +116,7 @@ with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) a
 version = util.parse_version(line)
 break

-plexapi.BASE_HEADERS['X-Plex-Client-Identifier'] = "Plex-Meta-Manager"
+plexapi.BASE_HEADERS["X-Plex-Client-Identifier"] = "Plex-Meta-Manager"

 def start(attrs):
 logger.add_main_handler()

@@ -186,7 +186,7 @@ def start(attrs):
 logger.critical(e)
 logger.info("")
 end_time = datetime.now()
-run_time = str(end_time - start_time).split('.')[0]
+run_time = str(end_time - start_time).split(".")[0]
 if config:
 try:
 config.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)

@@ -438,12 +438,12 @@ def library_operations(config, library):
 if library.update_blank_track_titles:
 tracks = library.get_all(collection_level="track")
 num_edited = 0
-for i, item in enumerate(tracks, 1):
-logger.ghost(f"Processing Track: {i}/{len(tracks)} {item.title}")
-if not item.title and item.titleSort:
-library.edit_query(item, {"title.locked": 1, "title.value": item.titleSort})
+for i, track in enumerate(tracks, 1):
+logger.ghost(f"Processing Track: {i}/{len(tracks)} {track.title}")
+if not track.title and track.titleSort:
+track.editTitle(track.titleSort)
 num_edited += 1
-logger.info(f"Track: {item.titleSort} was updated with sort title")
+logger.info(f"Track: {track.titleSort} was updated with sort title")
 logger.info(f"{len(tracks)} Tracks Processed; {num_edited} Blank Track Titles Updated")

 tmdb_collections = {}

@@ -469,6 +469,9 @@ def library_operations(config, library):
 library.find_assets(item)
 tmdb_id, tvdb_id, imdb_id = library.get_ids(item)

+item.batchEdits()
+batch_display = "Batch Edits"
+
 if library.mass_trakt_rating_update:
 try:
 if library.is_movie and tmdb_id in trakt_ratings:

@@ -478,7 +481,7 @@ def library_operations(config, library):
 else:
 raise Failed
 if str(item.userRating) != str(new_rating):
-library.edit_query(item, {"userRating.value": new_rating, "userRating.locked": 1})
+library.query_data(item.rate, new_rating)
 logger.info(f"{item.title[:25]:<25} | User Rating | {new_rating}")
 except Failed:
 pass

@@ -487,7 +490,7 @@ def library_operations(config, library):
 try:
 parental_guide = config.IMDb.parental_guide(imdb_id)
 labels = [f"{k.capitalize()}:{v}" for k, v in parental_guide.items() if library.mass_imdb_parental_labels == "with_none" or v != "None"]
-library.edit_tags("label", item, add_tags=labels)
+batch_display += f"\n{library.edit_tags('label', item, add_tags=labels)}"
 except Failed:
 pass

@@ -498,19 +501,15 @@ def library_operations(config, library):
 radarr_adds.append((tmdb_id, path))
 if library.Sonarr and library.sonarr_add_all_existing and tvdb_id:
 path = path.replace(library.Sonarr.plex_path, library.Sonarr.sonarr_path)
-path = path[:-1] if path.endswith(('/', '\\')) else path
+path = path[:-1] if path.endswith(("/", "\\")) else path
 sonarr_adds.append((tvdb_id, path))

 tmdb_item = None
-if library.tmdb_collections or library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" \
-or library.mass_critic_rating_update == "tmdb" or library.mass_originally_available_update == "tmdb" \
-or library.mass_content_rating_update == "tmdb":
+if library.tmdb_collections or any([o == "tmdb" for o in library.meta_operations]):
 tmdb_item = config.TMDb.get_item(item, tmdb_id, tvdb_id, imdb_id, is_movie=library.is_movie)

 omdb_item = None
-if library.mass_genre_update == "omdb" or library.mass_audience_rating_update == "omdb" \
-or library.mass_critic_rating_update == "omdb" or library.mass_content_rating_update == "omdb" \
-or library.mass_originally_available_update == "omdb":
+if any([o == "omdb" for o in library.meta_operations]):
 if config.OMDb.limit is False:
 if tmdb_id and not imdb_id:
 imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)

@@ -528,7 +527,7 @@ def library_operations(config, library):
 logger.info(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}")

 tvdb_item = None
-if library.mass_genre_update == "tvdb" or library.mass_originally_available_update == "tvdb":
+if any([o == "tvdb" for o in library.meta_operations]):
 if tvdb_id:
 try:
 tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie)

@@ -538,7 +537,7 @@ def library_operations(config, library):
 logger.info(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}")

 anidb_item = None
-if library.mass_genre_update == "anidb":
+if any([o == "anidb" for o in library.meta_operations]):
 if item.ratingKey in reverse_anidb:
 anidb_id = reverse_anidb[item.ratingKey]
 elif tvdb_id in config.Convert._tvdb_to_anidb:

@@ -555,8 +554,7 @@ def library_operations(config, library):
 logger.error(str(e))

 mdb_item = None
-if library.mass_audience_rating_update in util.mdb_types or library.mass_critic_rating_update in util.mdb_types \
-or library.mass_content_rating_update in ["mdb", "mdb_commonsense"] or library.mass_originally_available_update == "mdb":
+if any([o and o.startswith("mdb") for o in library.meta_operations]):
 if config.Mdblist.limit is False:
 if tmdb_id and not imdb_id:
 imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)

@@ -607,55 +605,48 @@ def library_operations(config, library):
 raise Failed

 if library.mass_genre_update or library.genre_mapper:
-try:
-new_genres = []
-if library.mass_genre_update:
-if tmdb_item and library.mass_genre_update == "tmdb":
-new_genres = tmdb_item.genres
-elif omdb_item and library.mass_genre_update == "omdb":
-new_genres = omdb_item.genres
-elif tvdb_item and library.mass_genre_update == "tvdb":
-new_genres = tvdb_item.genres
-elif anidb_item and library.mass_genre_update == "anidb":
-new_genres = anidb_item.tags
+new_genres = []
+if library.mass_genre_update:
+if tmdb_item and library.mass_genre_update == "tmdb":
+new_genres = tmdb_item.genres
+elif omdb_item and library.mass_genre_update == "omdb":
+new_genres = omdb_item.genres
+elif tvdb_item and library.mass_genre_update == "tvdb":
+new_genres = tvdb_item.genres
+elif anidb_item and library.mass_genre_update == "anidb":
+new_genres = anidb_item.tags
+else:
+raise Failed
+if not new_genres:
+logger.info(f"{item.title[:25]:<25} | No Genres Found")
+if library.genre_mapper:
+if not new_genres:
+new_genres = [g.tag for g in item.genres]
+mapped_genres = []
+for genre in new_genres:
+if genre in library.genre_mapper:
+if library.genre_mapper[genre]:
+mapped_genres.append(library.genre_mapper[genre])
 else:
-raise Failed
-if not new_genres:
-logger.info(f"{item.title[:25]:<25} | No Genres Found")
-if library.genre_mapper:
-if not new_genres:
-new_genres = [g.tag for g in item.genres]
-mapped_genres = []
-for genre in new_genres:
-if genre in library.genre_mapper:
-if library.genre_mapper[genre]:
-mapped_genres.append(library.genre_mapper[genre])
-else:
-mapped_genres.append(genre)
-new_genres = mapped_genres
-library.edit_tags("genre", item, sync_tags=new_genres)
-except Failed:
-pass
+mapped_genres.append(genre)
+new_genres = mapped_genres
+batch_display += f"\n{library.edit_tags('genre', item, sync_tags=new_genres)}"
 if library.mass_audience_rating_update:
-try:
-new_rating = get_rating(library.mass_audience_rating_update)
-if new_rating is None:
-logger.info(f"{item.title[:25]:<25} | No Rating Found")
-elif str(item.audienceRating) != str(new_rating):
-library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
-logger.info(f"{item.title[:25]:<25} | Audience Rating | {new_rating}")
-except Failed:
-pass
+new_rating = get_rating(library.mass_audience_rating_update)
+if new_rating is None:
+logger.info(f"{item.title[:25]:<25} | No Rating Found")
+elif str(item.audienceRating) != str(new_rating):
+item.editField("audienceRating", new_rating)
+batch_display += f"\n{item.title[:25]:<25} | Audience Rating | {new_rating}"
 if library.mass_critic_rating_update:
-try:
-new_rating = get_rating(library.mass_critic_rating_update)
-if new_rating is None:
-logger.info(f"{item.title[:25]:<25} | No Rating Found")
-elif str(item.rating) != str(new_rating):
-library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
-logger.info(f"{item.title[:25]:<25} | Critic Rating | {new_rating}")
-except Failed:
-pass
+new_rating = get_rating(library.mass_critic_rating_update)
+if new_rating is None:
+logger.info(f"{item.title[:25]:<25} | No Rating Found")
+elif str(item.rating) != str(new_rating):
+item.editField("rating", new_rating)
+batch_display += f"{item.title[:25]:<25} | Critic Rating | {new_rating}"
 if library.mass_content_rating_update or library.content_rating_mapper:
 try:
 new_rating = None

@@ -678,8 +669,8 @@ def library_operations(config, library):
 if new_rating in library.content_rating_mapper:
 new_rating = library.content_rating_mapper[new_rating]
 if str(item.contentRating) != str(new_rating):
-library.edit_query(item, {"contentRating.value": new_rating, "contentRating.locked": 1})
-logger.info(f"{item.title[:25]:<25} | Content Rating | {new_rating}")
+item.editContentRating(new_rating)
+batch_display += f"\n{item.title[:25]:<25} | Content Rating | {new_rating}"
 except Failed:
 pass
 if library.mass_originally_available_update:

@@ -698,12 +689,14 @@ def library_operations(config, library):
 raise Failed
 if new_date is None:
 logger.info(f"{item.title[:25]:<25} | No Originally Available Date Found")
-elif str(item.rating) != str(new_date):
-library.edit_query(item, {"originallyAvailableAt.value": new_date.strftime("%Y-%m-%d"), "originallyAvailableAt.locked": 1})
-logger.info(f"{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}")
+elif str(item.originallyAvailableAt) != str(new_date):
+item.editOriginallyAvailable(new_date)
+batch_display += f"\n{item.title[:25]:<25} | Originally Available Date | {new_date.strftime('%Y-%m-%d')}"
 except Failed:
 pass

+item.saveEdits()
+
 if library.Radarr and library.radarr_add_all_existing:
 try:
 library.Radarr.add_tmdb(radarr_adds)

@@ -1,4 +1,4 @@
-PlexAPI==4.9.2
+PlexAPI==4.10.1
 tmdbapis==1.0.3
 arrapi==1.3.1
 lxml==4.8.0
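Editor's note: the hunks above replace Plex-Meta-Manager's dict-based edit_query/edit_item calls with the batch-edit helpers exposed by plexapi 4.10 (batchEdits(), editTitle(), editSummary(), saveEdits(), and friends, all visible in the diff). The following is an illustrative sketch of that pattern in isolation, not code from this commit; the server URL, token, section name, and titles are placeholders.

# Minimal sketch of the plexapi 4.10 batch-edit flow (placeholder connection details).
from plexapi.server import PlexServer

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")      # placeholder URL and token
movie = plex.library.section("Movies").get("Example Movie")    # placeholder section and title

movie.batchEdits()                       # start queuing edits instead of sending each one
movie.editTitle("Example Movie (4K)")    # queue a title edit
movie.editSummary("A better summary.")   # queue a summary edit
movie.saveEdits()                        # send all queued edits in a single request

Queuing edits this way is why the commit can collect a batch_display string per item and flush everything with one saveEdits() call at the end.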