Merge pull request #652 from meisnate12/develop

v1.15.1
meisnate12 2022-01-25 02:54:19 -05:00 committed by GitHub
commit 7d4c4827b4
17 changed files with 507 additions and 447 deletions

.gitignore

@@ -9,7 +9,7 @@ __pycache__/
# Distribution / packaging
.idea
.Python
/test.py
/test*
logs/
config/*
!config/overlays/

@@ -57,6 +57,7 @@ Before posting on GitHub about an enhancement, error, or configuration question
## Wiki Table of Contents
- [Home](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Home)
- [Installation](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Installation)
- [Run Commands & Environmental Variables](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Run-Commands-&-Environmental-Variables)
- [Local Walkthrough](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Local-Walkthrough)
- [Docker Walkthrough](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Docker-Walkthrough)
- [unRAID Walkthrough](https://github.com/meisnate12/Plex-Meta-Manager/wiki/unRAID-Walkthrough)
@@ -79,9 +80,9 @@ Before posting on GitHub about an enhancement, error, or configuration question
- [MyAnimeList Attributes](https://github.com/meisnate12/Plex-Meta-Manager/wiki/MyAnimeList-Attributes)
- [Metadata and Playlist Files](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Metadata-and-Playlist-Files)
- Metadata
- [Movies Metadata](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Movies-Metadata)
- [Shows Metadata](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Shows-Metadata)
- [Artists Metadata](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Artists-Metadata)
- [Movie Library Metadata](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Movie-Library-Metadata)
- [TV Show Library Metadata](https://github.com/meisnate12/Plex-Meta-Manager/wiki/TV-Show-Library-Metadata)
- [Music Library Metadata](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Music-Library-Metadata)
- [Templates](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Templates)
- [Filters](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Filters)
- Builders
@@ -92,6 +93,7 @@ Before posting on GitHub about an enhancement, error, or configuration question
- [IMDb Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/IMDb-Builders)
- [Trakt Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Trakt-Builders)
- [Tautulli Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Tautulli-Builders)
- [MdbList Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/MdbList-Builders)
- [Letterboxd Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Letterboxd-Builders)
- [ICheckMovies Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/ICheckMovies-Builders)
- [FlixPatrol Builders](https://github.com/meisnate12/Plex-Meta-Manager/wiki/FlixPatrol-Builders)

@@ -1 +1 @@
1.15.0
1.15.1

@@ -26,6 +26,8 @@ settings: # Can be individually specified
dimensional_asset_rename: false
download_url_assets: false
show_missing_season_assets: false
show_missing_episode_assets: false
show_asset_not_needed: true
sync_mode: append
minimum_items: 1
default_collection_order:
@@ -43,6 +45,7 @@ settings: # Can be individually specified
tvdb_language: eng
ignore_ids:
ignore_imdb_ids:
item_refresh_delay: 0
playlist_sync_to_user: all
verify_ssl: true
webhooks: # Can be individually specified per library as well

@@ -65,7 +65,9 @@ filter_translation = {
"last_played": "lastViewedAt",
"plays": "viewCount",
"user_rating": "userRating",
"writer": "writers"
"writer": "writers",
"mood": "moods",
"style": "styles"
}
modifier_alias = {".greater": ".gt", ".less": ".lt"}
all_builders = anidb.builders + anilist.builders + flixpatrol.builders + icheckmovies.builders + imdb.builders + \
@@ -93,21 +95,21 @@ string_details = ["sort_title", "content_rating", "name_mapping"]
ignored_details = [
"smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "test",
"delete_not_scheduled", "tmdb_person", "build_collection", "collection_order", "collection_level",
"validate_builders", "sort_by", "libraries", "sync_to_users", "collection_name", "playlist_name", "name"
"validate_builders", "libraries", "sync_to_users", "collection_name", "playlist_name", "name"
]
details = ["ignore_ids", "ignore_imdb_ids", "server_preroll", "changes_webhooks", "collection_mode",
"minimum_items", "label", "album_sorting"] + boolean_details + scheduled_boolean + string_details
collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \
poster_details + background_details + summary_details + string_details
item_bool_details = ["item_tmdb_season_titles", "item_assets", "revert_overlay", "item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh"]
item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay"] + item_bool_details + list(plex.item_advance_keys.keys())
item_details = ["non_item_remove_label", "item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_refresh_delay"] + item_bool_details + list(plex.item_advance_keys.keys())
none_details = ["label.sync", "item_label.sync"]
radarr_details = ["radarr_add_missing", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"]
sonarr_details = [
"sonarr_add_missing", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series",
"sonarr_quality", "sonarr_season", "sonarr_search", "sonarr_cutoff_search", "sonarr_tag"
]
album_details = ["item_label", "item_album_sorting"]
album_details = ["non_item_remove_label", "item_label", "item_album_sorting"]
filters_by_type = {
"movie_show_season_episode_artist_album_track": ["title", "summary", "collection", "has_collection", "added", "last_played", "user_rating", "plays"],
"movie_show_season_episode_album_track": ["year"],
@@ -118,7 +120,7 @@ filters_by_type = {
"movie_show_episode": ["actor", "content_rating", "audience_rating"],
"movie_show_album": ["label"],
"movie_episode_track": ["audio_track_title"],
"movie_show": ["studio", "original_language", "has_overlay", "tmdb_vote_count", "tmdb_year"],
"movie_show": ["studio", "original_language", "has_overlay", "tmdb_vote_count", "tmdb_year", "tmdb_genre"],
"movie_episode": ["director", "producer", "writer", "resolution", "audio_language", "subtitle_language"],
"movie_artist": ["country"],
"show": ["network", "first_episode_aired", "last_episode_aired"],
@@ -133,12 +135,12 @@ filters = {
"album": [item for check, sub in filters_by_type.items() for item in sub if "album" in check],
"track": [item for check, sub in filters_by_type.items() for item in sub if "track" in check]
}
tmdb_filters = ["original_language", "tmdb_vote_count", "tmdb_year", "first_episode_aired", "last_episode_aired"]
tmdb_filters = ["original_language", "tmdb_vote_count", "tmdb_year", "tmdb_genre", "first_episode_aired", "last_episode_aired"]
string_filters = ["title", "summary", "studio", "record_label", "filepath", "audio_track_title"]
string_modifiers = ["", ".not", ".is", ".isnot", ".begins", ".ends", ".regex"]
tag_filters = [
"actor", "collection", "content_rating", "country", "director", "network", "genre", "label", "producer", "year",
"writer", "original_language", "resolution", "audio_language", "subtitle_language"
"writer", "original_language", "resolution", "audio_language", "subtitle_language", "tmdb_genre"
]
tag_modifiers = ["", ".not"]
boolean_filters = ["has_collection", "has_overlay"]
@@ -163,24 +165,25 @@ custom_sort_builders = [
"flixpatrol_url", "flixpatrol_demographics", "flixpatrol_popular", "flixpatrol_top",
"trakt_recommended_daily", "trakt_recommended_weekly", "trakt_recommended_monthly", "trakt_recommended_yearly", "trakt_recommended_all",
"trakt_watched_daily", "trakt_watched_weekly", "trakt_watched_monthly", "trakt_watched_yearly", "trakt_watched_all",
"tautulli_popular", "tautulli_watched", "letterboxd_list", "icheckmovies_list",
"tautulli_popular", "tautulli_watched", "mdblist_list", "letterboxd_list", "icheckmovies_list",
"anilist_top_rated", "anilist_popular", "anilist_trending", "anilist_search",
"mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_movie", "mal_ova", "mal_special",
"mal_popular", "mal_favorite", "mal_suggested", "mal_userlist", "mal_season", "mal_genre", "mal_studio"
]
episode_parts_only = ["plex_pilots"]
parts_collection_valid = [
"plex_all", "plex_search", "trakt_list", "trakt_list_details", "collection_mode", "label", "visible_library", "changes_webhooks"
"visible_home", "visible_shared", "show_missing", "save_missing", "missing_only_released", "server_preroll",
"item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh", "imdb_list"
] + summary_details + poster_details + background_details + string_details
"plex_all", "plex_search", "trakt_list", "trakt_list_details", "collection_mode", "label", "visible_library",
"visible_home", "visible_shared", "show_missing", "save_missing", "missing_only_released", "server_preroll", "changes_webhooks",
"item_lock_background", "item_lock_poster", "item_lock_title", "item_refresh", "item_refresh_delay", "imdb_list"
] + episode_parts_only + summary_details + poster_details + background_details + string_details
playlist_attributes = [
"filters", "name_mapping", "show_filtered", "show_missing", "save_missing",
"missing_only_released", "only_filter_missing", "delete_below_minimum", "ignore_ids", "ignore_imdb_ids",
"server_preroll", "changes_webhooks", "minimum_items",
] + custom_sort_builders + summary_details + poster_details + radarr_details + sonarr_details
music_attributes = [
"item_label", "item_assets", "item_lock_background", "item_lock_poster", "item_lock_title",
"item_refresh", "plex_search", "plex_all", "filters"
"non_item_remove_label", "item_label", "item_assets", "item_lock_background", "item_lock_poster", "item_lock_title",
"item_refresh", "item_refresh_delay", "plex_search", "plex_all", "filters"
] + details + summary_details + poster_details + background_details
class CollectionBuilder:
@@ -213,6 +216,8 @@ class CollectionBuilder:
self.missing_movies = []
self.missing_shows = []
self.missing_parts = []
self.added_to_radarr = []
self.added_to_sonarr = []
self.builders = []
self.filters = []
self.tmdb_filters = []
@@ -374,10 +379,10 @@ class CollectionBuilder:
for tmdb_id in util.get_int_list(self.data[methods["tmdb_person"]], "TMDb Person ID"):
person = self.config.TMDb.get_person(tmdb_id)
valid_names.append(person.name)
if hasattr(person, "biography") and person.biography:
if person.biography:
self.summaries["tmdb_person"] = person.biography
if hasattr(person, "profile_path") and person.profile_path:
self.posters["tmdb_person"] = f"{self.config.TMDb.image_url}{person.profile_path}"
if person.profile_url:
self.posters["tmdb_person"] = person.profile_url
if len(valid_names) > 0:
self.details["tmdb_person"] = valid_names
else:
@@ -503,6 +508,8 @@ class CollectionBuilder:
raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for album collections")
elif not self.library.is_music and method_name in music_only_builders:
raise Failed(f"{self.Type} Error: {method_final} attribute only allowed for music libraries")
elif self.collection_level != "episode" and method_name in episode_parts_only:
raise Failed(f"{self.Type} Error: {method_final} attribute only allowed with Collection Level: episode")
elif self.parts_collection and method_name not in parts_collection_valid:
raise Failed(f"{self.Type} Error: {method_final} attribute not allowed with Collection Level: {self.collection_level.capitalize()}")
elif self.smart and method_name in smart_invalid:
@@ -563,13 +570,13 @@ class CollectionBuilder:
else:
logger.error(e)
if not self.server_preroll and not self.smart_url and len(self.builders) == 0:
raise Failed(f"{self.Type} Error: No builders were found")
if self.custom_sort is True and (len(self.builders) > 1 or self.builders[0][0] not in custom_sort_builders):
raise Failed(f"{self.Type} Error: " + ('Playlists' if playlist else 'collection_order: custom') +
(f" can only be used with a single builder per {self.type}" if len(self.builders) > 1 else f" cannot be used with {self.builders[0][0]}"))
if not self.server_preroll and not self.smart_url and len(self.builders) == 0:
raise Failed(f"{self.Type} Error: No builders were found")
if "add_missing" not in self.radarr_details:
self.radarr_details["add_missing"] = self.library.Radarr.add_missing if self.library.Radarr else False
if "add_existing" not in self.radarr_details:
@@ -715,11 +722,9 @@ class CollectionBuilder:
if method_name == "url_poster":
self.posters[method_name] = method_data
elif method_name == "tmdb_poster":
url_slug = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_path
self.posters[method_name] = f"{self.config.TMDb.image_url}{url_slug}"
self.posters[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_url
elif method_name == "tmdb_profile":
url_slug = self.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_path
self.posters[method_name] = f"{self.config.TMDb.image_url}{url_slug}"
self.posters[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_url
elif method_name == "tvdb_poster":
self.posters[method_name] = f"{self.config.TVDb.get_item(method_data, self.library.is_movie).poster_path}"
elif method_name == "file_poster":
@@ -732,8 +737,7 @@ class CollectionBuilder:
if method_name == "url_background":
self.backgrounds[method_name] = method_data
elif method_name == "tmdb_background":
url_slug = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_path
self.backgrounds[method_name] = f"{self.config.TMDb.image_url}{url_slug}"
self.backgrounds[method_name] = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).backdrop_url
elif method_name == "tvdb_background":
self.posters[method_name] = f"{self.config.TVDb.get_item(method_data, self.library.is_movie).background_path}"
elif method_name == "file_background":
@@ -792,6 +796,10 @@ class CollectionBuilder:
if "item_label.remove" in methods and "item_label.sync" in methods:
raise Failed(f"{self.Type} Error: Cannot use item_label.remove and item_label.sync together")
self.item_details[method_final] = util.get_list(method_data) if method_data else []
elif method_name == "non_item_remove_label":
if not method_data:
raise Failed(f"{self.Type} Error: non_item_remove_label is blank")
self.item_details[method_final] = util.get_list(method_data)
elif method_name in ["item_radarr_tag", "item_sonarr_tag"]:
if method_name in methods and f"{method_name}.sync" in methods:
raise Failed(f"{self.Type} Error: Cannot use {method_name} and {method_name}.sync together")
@@ -833,6 +841,8 @@ class CollectionBuilder:
raise Failed("Each Overlay can only be used once per Library")
self.library.overlays.append(name)
self.item_details[method_name] = name
elif method_name == "item_refresh_delay":
self.item_details[method_name] = self._parse(method_name, method_data, datatype="int", default=0, minimum=0)
elif method_name in item_bool_details:
if self._parse(method_name, method_data, datatype="bool", default=False):
self.item_details[method_name] = True
@@ -913,9 +923,9 @@ class CollectionBuilder:
for dict_data, dict_methods in self._parse(method_name, method_data, datatype="dictlist"):
new_dictionary = {}
for search_method, search_data in dict_data.items():
search_attr, modifier, search_final = self._split(search_method)
if search_final not in anilist.searches:
raise Failed(f"{self.Type} Error: {method_name} {search_final} attribute not supported")
search_attr, modifier = os.path.splitext(str(search_method).lower())
if search_method not in anilist.searches:
raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported")
elif search_attr == "season":
new_dictionary[search_attr] = self._parse(search_attr, search_data, parent=method_name, default=current_season, options=util.seasons)
if "year" not in dict_methods:
@@ -924,7 +934,7 @@ class CollectionBuilder:
elif search_attr == "year":
new_dictionary[search_attr] = self._parse(search_attr, search_data, datatype="int", parent=method_name, default=default_year, minimum=1917, maximum=default_year + 1)
elif search_data is None:
raise Failed(f"{self.Type} Error: {method_name} {search_final} attribute is blank")
raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute is blank")
elif search_attr == "adult":
new_dictionary[search_attr] = self._parse(search_attr, search_data, datatype="bool", parent=method_name)
elif search_attr == "country":
@@ -932,17 +942,17 @@ class CollectionBuilder:
elif search_attr == "source":
new_dictionary[search_attr] = self._parse(search_attr, search_data, options=anilist.media_source, parent=method_name)
elif search_attr in ["episodes", "duration", "score", "popularity"]:
new_dictionary[search_final] = self._parse(search_final, search_data, datatype="int", parent=method_name)
new_dictionary[search_method] = self._parse(search_method, search_data, datatype="int", parent=method_name)
elif search_attr in ["format", "status", "genre", "tag", "tag_category"]:
new_dictionary[search_final] = self.config.AniList.validate(search_attr.replace("_", " ").title(), self._parse(search_final, search_data))
new_dictionary[search_method] = self.config.AniList.validate(search_attr.replace("_", " ").title(), self._parse(search_method, search_data))
elif search_attr in ["start", "end"]:
new_dictionary[search_final] = util.validate_date(search_data, f"{method_name} {search_final} attribute", return_as="%m/%d/%Y")
new_dictionary[search_method] = util.validate_date(search_data, f"{method_name} {search_method} attribute", return_as="%m/%d/%Y")
elif search_attr == "min_tag_percent":
new_dictionary[search_attr] = self._parse(search_attr, search_data, datatype="int", parent=method_name, minimum=0, maximum=100)
elif search_attr == "search":
new_dictionary[search_attr] = str(search_data)
elif search_final not in ["sort_by", "limit"]:
raise Failed(f"{self.Type} Error: {method_name} {search_final} attribute not supported")
elif search_method not in ["sort_by", "limit"]:
raise Failed(f"{self.Type} Error: {method_name} {search_method} attribute not supported")
if len(new_dictionary) == 0:
raise Failed(f"{self.Type} Error: {method_name} must have at least one valid search option")
new_dictionary["sort_by"] = self._parse("sort_by", dict_data, methods=dict_methods, parent=method_name, default="score", options=anilist.sort_options)
@@ -1056,7 +1066,7 @@ class CollectionBuilder:
}))
def _plex(self, method_name, method_data):
if method_name == "plex_all":
if method_name in ["plex_all", "plex_pilots"]:
self.builders.append((method_name, self.collection_level))
elif method_name in ["plex_search", "plex_collectionless"]:
for dict_data, dict_methods in self._parse(method_name, method_data, datatype="dictlist"):
@@ -1080,7 +1090,7 @@ class CollectionBuilder:
self.builders.append((method_name, self._parse(method_name, method_data, "bool")))
def _mdblist(self, method_name, method_data):
for mdb_dict in self.config.Mdblist.validate_mdb_lists(method_data, self.language):
for mdb_dict in self.config.Mdblist.validate_mdblist_lists(method_data):
self.builders.append((method_name, mdb_dict))
def _tautulli(self, method_name, method_data):
@@ -1098,59 +1108,59 @@ class CollectionBuilder:
for dict_data, dict_methods in self._parse(method_name, method_data, datatype="dictlist"):
new_dictionary = {"limit": self._parse("limit", dict_data, datatype="int", methods=dict_methods, default=100, parent=method_name)}
for discover_method, discover_data in dict_data.items():
discover_attr, modifier, discover_final = self._split(discover_method)
discover_attr, modifier = os.path.splitext(str(discover_method).lower())
if discover_data is None:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute is blank")
elif discover_final not in tmdb.discover_all:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute not supported")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute is blank")
elif discover_method not in tmdb.discover_all:
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported")
elif self.library.is_movie and discover_attr in tmdb.discover_tv_only:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute only works for show libraries")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for show libraries")
elif self.library.is_show and discover_attr in tmdb.discover_movie_only:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute only works for movie libraries")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute only works for movie libraries")
elif discover_attr in ["language", "region"]:
regex = ("([a-z]{2})-([A-Z]{2})", "en-US") if discover_attr == "language" else ("^[A-Z]{2}$", "US")
new_dictionary[discover_attr] = self._parse(discover_attr, discover_data, parent=method_name, regex=regex)
elif discover_attr == "sort_by" and self.library.is_movie:
elif discover_attr == "sort_by":
options = tmdb.discover_movie_sort if self.library.is_movie else tmdb.discover_tv_sort
new_dictionary[discover_final] = self._parse(discover_attr, discover_data, parent=method_name, options=options)
new_dictionary[discover_method] = self._parse(discover_attr, discover_data, parent=method_name, options=options)
elif discover_attr == "certification_country":
if "certification" in dict_data or "certification.lte" in dict_data or "certification.gte" in dict_data:
new_dictionary[discover_final] = discover_data
new_dictionary[discover_method] = discover_data
else:
raise Failed(f"{self.Type} Error: {method_name} {discover_attr} attribute: must be used with either certification, certification.lte, or certification.gte")
elif discover_attr == "certification":
if "certification_country" in dict_data:
new_dictionary[discover_final] = discover_data
new_dictionary[discover_method] = discover_data
else:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute: must be used with certification_country")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with certification_country")
elif discover_attr == "watch_region":
if "with_watch_providers" in dict_data or "without_watch_providers" in dict_data or "with_watch_monetization_types" in dict_data:
new_dictionary[discover_final] = discover_data
new_dictionary[discover_method] = discover_data
else:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with either with_watch_providers, without_watch_providers, or with_watch_monetization_types")
elif discover_attr == "with_watch_monetization_types":
if "watch_region" in dict_data:
new_dictionary[discover_final] = self._parse(discover_attr, discover_data, parent=method_name, options=tmdb.discover_monetization_types)
new_dictionary[discover_method] = self._parse(discover_attr, discover_data, parent=method_name, options=tmdb.discover_monetization_types)
else:
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute: must be used with watch_region")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute: must be used with watch_region")
elif discover_attr in tmdb.discover_booleans:
new_dictionary[discover_attr] = self._parse(discover_attr, discover_data, datatype="bool", parent=method_name)
elif discover_attr == "vote_average":
new_dictionary[discover_final] = self._parse(discover_final, discover_data, datatype="float", parent=method_name)
new_dictionary[discover_method] = self._parse(discover_method, discover_data, datatype="float", parent=method_name)
elif discover_attr == "with_status":
new_dictionary[discover_attr] = self._parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=5)
elif discover_attr == "with_type":
new_dictionary[discover_attr] = self._parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=0, maximum=6)
elif discover_final in tmdb.discover_dates:
new_dictionary[discover_final] = util.validate_date(discover_data, f"{method_name} {discover_final} attribute", return_as="%m/%d/%Y")
elif discover_method in tmdb.discover_dates:
new_dictionary[discover_method] = util.validate_date(discover_data, f"{method_name} {discover_method} attribute", return_as="%m/%d/%Y")
elif discover_attr in tmdb.discover_years:
new_dictionary[discover_attr] = self._parse(discover_attr, discover_data, datatype="int", parent=method_name, minimum=1800, maximum=self.current_year + 1)
elif discover_attr in tmdb.discover_ints:
new_dictionary[discover_final] = self._parse(discover_final, discover_data, datatype="int", parent=method_name)
elif discover_final in tmdb.discover_strings:
new_dictionary[discover_final] = discover_data
new_dictionary[discover_method] = self._parse(discover_method, discover_data, datatype="int", parent=method_name)
elif discover_method in tmdb.discover_strings:
new_dictionary[discover_method] = discover_data
elif discover_attr != "limit":
raise Failed(f"{self.Type} Error: {method_name} {discover_final} attribute not supported")
raise Failed(f"{self.Type} Error: {method_name} {discover_method} attribute not supported")
if len(new_dictionary) > 1:
self.builders.append((method_name, new_dictionary))
else:
@@ -1162,21 +1172,21 @@ class CollectionBuilder:
if method_name.endswith("_details"):
if method_name.startswith(("tmdb_collection", "tmdb_movie", "tmdb_show")):
item = self.config.TMDb.get_movie_show_or_collection(values[0], self.library.is_movie)
if hasattr(item, "overview") and item.overview:
if item.overview:
self.summaries[method_name] = item.overview
if hasattr(item, "backdrop_path") and item.backdrop_path:
self.backgrounds[method_name] = f"{self.config.TMDb.image_url}{item.backdrop_path}"
if hasattr(item, "poster_path") and item.poster_path:
self.posters[method_name] = f"{self.config.TMDb.image_url}{item.poster_path}"
if item.backdrop_url:
self.backgrounds[method_name] = item.backdrop_url
if item.poster_path:
self.posters[method_name] = item.poster_url
elif method_name.startswith(("tmdb_actor", "tmdb_crew", "tmdb_director", "tmdb_producer", "tmdb_writer")):
item = self.config.TMDb.get_person(values[0])
if hasattr(item, "biography") and item.biography:
if item.biography:
self.summaries[method_name] = item.biography
if hasattr(item, "profile_path") and item.profile_path:
self.posters[method_name] = f"{self.config.TMDb.image_url}{item.profile_path}"
if item.profile_path:
self.posters[method_name] = item.profile_url
elif method_name.startswith("tmdb_list"):
item = self.config.TMDb.get_list(values[0])
if hasattr(item, "description") and item.description:
if item.description:
self.summaries[method_name] = item.description
for value in values:
self.builders.append((method_name[:-8] if method_name.endswith("_details") else method_name, value))
@@ -1204,12 +1214,10 @@ class CollectionBuilder:
if method_name.endswith("_details"):
if method_name.startswith(("tvdb_movie", "tvdb_show")):
item = self.config.TVDb.get_item(values[0], method_name.startswith("tvdb_movie"))
if hasattr(item, "description") and item.description:
self.summaries[method_name] = item.description
if hasattr(item, "background_path") and item.background_path:
self.backgrounds[method_name] = f"{self.config.TMDb.image_url}{item.background_path}"
if hasattr(item, "poster_path") and item.poster_path:
self.posters[method_name] = f"{self.config.TMDb.image_url}{item.poster_path}"
if item.background_path:
self.backgrounds[method_name] = item.background_path
if item.poster_path:
self.posters[method_name] = item.poster_path
elif method_name.startswith("tvdb_list"):
self.summaries[method_name] = self.config.TVDb.get_list_description(values[0])
for value in values:
@@ -1348,7 +1356,7 @@ class CollectionBuilder:
if tvdb_id not in self.missing_shows:
self.missing_shows.append(tvdb_id)
except Failed as e:
logger.error(e)
logger.warning(e)
elif show_id not in self.missing_shows:
self.missing_shows.append(show_id)
else:
@@ -1366,7 +1374,7 @@ class CollectionBuilder:
try:
input_id = self.config.Convert.tmdb_to_tvdb(input_id, fail=True)
except Failed as e:
logger.error(e)
logger.warning(e)
continue
if input_id not in self.ignore_ids:
if input_id in self.library.show_map:
@@ -1388,7 +1396,7 @@ class CollectionBuilder:
if tvdb_id not in self.missing_shows:
self.missing_shows.append(tvdb_id)
except Failed as e:
logger.error(e)
logger.warning(e)
continue
if not isinstance(rating_keys, list):
rating_keys = [rating_keys]
@@ -1513,54 +1521,63 @@ class CollectionBuilder:
display_line = f"{indent}{param_s} {mod_s} {arg_s}"
return f"{arg_key}{mod}={arg}&", display_line
error = None
if final_attr not in plex.searches and not final_attr.startswith(("any", "all")):
raise Failed(f"{self.Type} Error: {final_attr} is not a valid {method} attribute")
error = f"{self.Type} Error: {final_attr} is not a valid {method} attribute"
elif self.library.is_show and final_attr in plex.movie_only_searches:
raise Failed(f"{self.Type} Error: {final_attr} {method} attribute only works for movie libraries")
error = f"{self.Type} Error: {final_attr} {method} attribute only works for movie libraries"
elif self.library.is_movie and final_attr in plex.show_only_searches:
raise Failed(f"{self.Type} Error: {final_attr} {method} attribute only works for show libraries")
error = f"{self.Type} Error: {final_attr} {method} attribute only works for show libraries"
elif self.library.is_music and final_attr not in plex.music_searches:
raise Failed(f"{self.Type} Error: {final_attr} {method} attribute does not work for music libraries")
error = f"{self.Type} Error: {final_attr} {method} attribute does not work for music libraries"
elif not self.library.is_music and final_attr in plex.music_searches:
raise Failed(f"{self.Type} Error: {final_attr} {method} attribute only works for music libraries")
elif _data is None:
raise Failed(f"{self.Type} Error: {final_attr} {method} attribute is blank")
elif final_attr.startswith(("any", "all")):
dicts = util.get_list(_data)
results = ""
display_add = ""
for dict_data in dicts:
if not isinstance(dict_data, dict):
raise Failed(f"{self.Type} Error: {attr} must be either a dictionary or list of dictionaries")
inside_filter, inside_display = _filter(dict_data, is_all=attr == "all", level=level)
if len(inside_filter) > 0:
display_add += inside_display
results += f"{conjunction if len(results) > 0 else ''}push=1&{inside_filter}pop=1&"
error = f"{self.Type} Error: {final_attr} {method} attribute only works for music libraries"
elif _data is not False and not _data:
error = f"{self.Type} Error: {final_attr} {method} attribute is blank"
else:
validation = self.validate_attribute(attr, modifier, final_attr, _data, validate, pairs=True)
if validation is None:
continue
elif attr in plex.date_attributes and modifier in ["", ".not"]:
last_text = "is not in the last" if modifier == ".not" else "is in the last"
last_mod = "%3E%3E" if modifier == "" else "%3C%3C"
results, display_add = build_url_arg(f"-{validation}d", mod=last_mod, arg_s=f"{validation} Days", mod_s=last_text)
elif attr == "duration" and modifier in [".gt", ".gte", ".lt", ".lte"]:
results, display_add = build_url_arg(validation * 60000)
elif attr in plex.boolean_attributes:
bool_mod = "" if validation else "!"
bool_arg = "true" if validation else "false"
results, display_add = build_url_arg(1, mod=bool_mod, arg_s=bool_arg, mod_s="is")
elif (attr in plex.tag_attributes + plex.string_attributes + plex.year_attributes) and modifier in ["", ".is", ".isnot", ".not", ".begins", ".ends"]:
if final_attr.startswith(("any", "all")):
dicts = util.get_list(_data)
results = ""
display_add = ""
for og_value, result in validation:
built_arg = build_url_arg(quote(str(result)) if attr in plex.string_attributes else result, arg_s=og_value)
display_add += built_arg[1]
results += f"{conjunction if len(results) > 0 else ''}{built_arg[0]}"
for dict_data in dicts:
if not isinstance(dict_data, dict):
raise Failed(
f"{self.Type} Error: {attr} must be either a dictionary or list of dictionaries")
inside_filter, inside_display = _filter(dict_data, is_all=attr == "all", level=level)
if len(inside_filter) > 0:
display_add += inside_display
results += f"{conjunction if len(results) > 0 else ''}push=1&{inside_filter}pop=1&"
else:
results, display_add = build_url_arg(validation)
display += display_add
output += f"{conjunction if len(output) > 0 else ''}{results}"
validation = self.validate_attribute(attr, modifier, final_attr, _data, validate, pairs=True)
if validation is None:
continue
elif attr in plex.date_attributes and modifier in ["", ".not"]:
last_text = "is not in the last" if modifier == ".not" else "is in the last"
last_mod = "%3E%3E" if modifier == "" else "%3C%3C"
results, display_add = build_url_arg(f"-{validation}d", mod=last_mod, arg_s=f"{validation} Days", mod_s=last_text)
elif attr == "duration" and modifier in [".gt", ".gte", ".lt", ".lte"]:
results, display_add = build_url_arg(validation * 60000)
elif attr in plex.boolean_attributes:
bool_mod = "" if validation else "!"
bool_arg = "true" if validation else "false"
results, display_add = build_url_arg(1, mod=bool_mod, arg_s=bool_arg, mod_s="is")
elif (attr in plex.tag_attributes + plex.string_attributes + plex.year_attributes) and modifier in ["", ".is", ".isnot", ".not", ".begins", ".ends"]:
results = ""
display_add = ""
for og_value, result in validation:
built_arg = build_url_arg(quote(str(result)) if attr in plex.string_attributes else result, arg_s=og_value)
display_add += built_arg[1]
results += f"{conjunction if len(results) > 0 else ''}{built_arg[0]}"
else:
results, display_add = build_url_arg(validation)
display += display_add
output += f"{conjunction if len(output) > 0 else ''}{results}"
if error:
if validate:
raise Failed(error)
else:
logger.error(error)
continue
return output, display
if "any" not in filter_alias and "all" not in filter_alias:
@@ -1619,7 +1636,7 @@ class CollectionBuilder:
return smart_pair(util.get_list(data, split=False))
elif attribute == "original_language":
return util.get_list(data, lower=True)
elif attribute == "filepath":
elif attribute in ["filepath", "tmdb_genre"]:
return util.get_list(data)
elif attribute == "history":
try:
@@ -1783,20 +1800,21 @@ class CollectionBuilder:
if item is None:
item = self.config.TMDb.get_movie(item_id) if is_movie else self.config.TMDb.get_show(self.config.Convert.tvdb_to_tmdb(item_id))
if check_released:
if util.validate_date(item.release_date if is_movie else item.first_air_date, "") > self.current_time:
date_to_check = item.release_date if is_movie else item.first_air_date
if not date_to_check or date_to_check > self.current_time:
return False
for filter_method, filter_data in self.tmdb_filters:
filter_attr, modifier, filter_final = self._split(filter_method)
if filter_attr == "original_language":
if (modifier == ".not" and item.original_language in filter_data) \
or (modifier == "" and item.original_language not in filter_data):
if (modifier == ".not" and item.original_language.iso_639_1 in filter_data) \
or (modifier == "" and item.original_language.iso_639_1 not in filter_data):
return False
elif filter_attr in ["first_episode_aired", "last_episode_aired"]:
tmdb_date = None
if filter_attr == "first_episode_aired":
tmdb_date = util.validate_date(item.first_air_date, "TMDB First Air Date")
tmdb_date = item.first_air_date
elif filter_attr == "last_episode_aired":
tmdb_date = util.validate_date(item.last_air_date, "TMDB Last Air Date")
tmdb_date = item.last_air_date
if util.is_date_filter(tmdb_date, modifier, filter_data, filter_final, self.current_time):
return False
elif modifier in [".gt", ".gte", ".lt", ".lte"]:
@@ -1805,12 +1823,15 @@ class CollectionBuilder:
attr = item.vote_count
elif filter_attr == "tmdb_year" and is_movie:
attr = item.year
elif filter_attr == "tmdb_year" and not is_movie:
air_date = item.first_air_date
if air_date:
attr = util.validate_date(air_date, "TMDb Year Filter").year
elif filter_attr == "tmdb_year" and not is_movie and item.first_air_date:
attr = item.first_air_date.year
if util.is_number_filter(attr, modifier, filter_data):
return False
elif filter_attr == "tmdb_genre":
attrs = [g.name for g in item.genres]
if (not list(set(filter_data) & set(attrs)) and modifier == "") \
or (list(set(filter_data) & set(attrs)) and modifier == ".not"):
return False
except Failed:
return False
return True
@@ -1944,7 +1965,7 @@ class CollectionBuilder:
except Failed as e:
logger.error(e)
continue
current_title = f"{movie.title} ({util.validate_date(movie.release_date, 'test').year})" if movie.release_date else movie.title
current_title = f"{movie.title} ({movie.release_date.year})" if movie.release_date else movie.title
if self.check_tmdb_filter(missing_id, True, item=movie, check_released=self.details["missing_only_released"]):
missing_movies_with_names.append((current_title, missing_id))
if self.details["show_missing"] is True:
@@ -1962,7 +1983,9 @@ class CollectionBuilder:
if self.library.Radarr:
if self.radarr_details["add_missing"]:
try:
added_to_radarr += self.library.Radarr.add_tmdb(missing_tmdb_ids, **self.radarr_details)
added = self.library.Radarr.add_tmdb(missing_tmdb_ids, **self.radarr_details)
self.added_to_radarr.extend([movie.tmdbId for movie in added])
added_to_radarr += len(added)
except Failed as e:
logger.error(e)
if "item_radarr_tag" in self.item_details:
@@ -2001,7 +2024,9 @@ class CollectionBuilder:
if self.library.Sonarr:
if self.sonarr_details["add_missing"]:
try:
added_to_sonarr += self.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.sonarr_details)
added = self.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.sonarr_details)
self.added_to_sonarr.extend([show.tvdbId for show in added])
added_to_sonarr += len(added)
except Failed as e:
logger.error(e)
if "item_sonarr_tag" in self.item_details:
@@ -2063,6 +2088,13 @@ class CollectionBuilder:
remove_tags = self.item_details["item_label.remove"] if "item_label.remove" in self.item_details else None
sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None
if "non_item_remove_label" in self.item_details:
rk_compare = [item.ratingKey for item in self.items]
for remove_label in self.item_details["non_item_remove_label"]:
for non_item in self.library.get_labeled_items(remove_label):
if non_item.ratingKey not in rk_compare:
self.library.edit_tags("label", non_item, remove_tags=[remove_label])
tmdb_paths = []
tvdb_paths = []
for item in self.items:
@@ -2084,20 +2116,22 @@ class CollectionBuilder:
path = path[:-1] if path.endswith(('/', '\\')) else path
tvdb_paths.append((self.library.show_rating_key_map[item.ratingKey], path))
advance_edits = {}
for method_name, method_data in self.item_details.items():
if method_name in plex.item_advance_keys:
key, options = plex.item_advance_keys[method_name]
if getattr(item, key) != options[method_data]:
advance_edits[key] = options[method_data]
if hasattr(item, "preferences"):
prefs = [p.id for p in item.preferences()]
for method_name, method_data in self.item_details.items():
if method_name in plex.item_advance_keys:
key, options = plex.item_advance_keys[method_name]
if key in prefs and getattr(item, key) != options[method_data]:
advance_edits[key] = options[method_data]
self.library.edit_item(item, item.title, self.collection_level.capitalize(), advance_edits, advanced=True)
if "item_tmdb_season_titles" in self.item_details and item.ratingKey in self.library.show_rating_key_map:
try:
tmdb_id = self.config.Convert.tvdb_to_tmdb(self.library.show_rating_key_map[item.ratingKey])
names = {str(s.season_number): s.name for s in self.config.TMDb.get_show(tmdb_id).seasons}
names = {s.season_number: s.name for s in self.config.TMDb.get_show(tmdb_id).seasons}
for season in self.library.query(item.seasons):
if str(season.index) in names:
self.library.edit_query(season, {"title.locked": 1, "title.value": names[str(season.index)]})
if season.index in names:
self.library.edit_query(season, {"title.locked": 1, "title.value": names[season.index]})
except Failed as e:
logger.error(e)
@@ -2110,19 +2144,24 @@ class CollectionBuilder:
if "item_lock_title" in self.item_details:
self.library.edit_query(item, {"title.locked": 1})
if "item_refresh" in self.item_details:
delay = self.item_details["item_refresh_delay"] if "item_refresh_delay" in self.item_details else self.library.item_refresh_delay
if delay > 0:
time.sleep(delay)
self.library.query(item.refresh)
if self.library.Radarr and tmdb_paths:
if "item_radarr_tag" in self.item_details:
self.library.Radarr.edit_tags([t[0] if isinstance(t, tuple) else t for t in tmdb_paths], self.item_details["item_radarr_tag"], self.item_details["apply_tags"])
if self.radarr_details["add_existing"]:
self.library.Radarr.add_tmdb(tmdb_paths, **self.radarr_details)
added = self.library.Radarr.add_tmdb(tmdb_paths, **self.radarr_details)
self.added_to_radarr.extend([movie.tmdbId for movie in added])
if self.library.Sonarr and tvdb_paths:
if "item_sonarr_tag" in self.item_details:
self.library.Sonarr.edit_tags([t[0] if isinstance(t, tuple) else t for t in tvdb_paths], self.item_details["item_sonarr_tag"], self.item_details["apply_tags"])
if self.sonarr_details["add_existing"]:
self.library.Sonarr.add_tvdb(tvdb_paths, **self.sonarr_details)
added = self.library.Sonarr.add_tvdb(tvdb_paths, **self.sonarr_details)
self.added_to_sonarr.extend([show.tvdbId for show in added])
for rating_key in rating_keys:
try:
@@ -2400,6 +2439,8 @@ class CollectionBuilder:
deleted=self.deleted,
additions=self.notification_additions,
removals=self.notification_removals,
radarr=self.added_to_radarr,
sonarr=self.added_to_sonarr,
playlist=playlist
)
except Failed as e:
@@ -2413,6 +2454,8 @@ class CollectionBuilder:
rating_keys = []
amount_added = 0
self.notification_additions = []
self.added_to_radarr = []
self.added_to_sonarr = []
for mm in self.run_again_movies:
if mm in self.library.movie_map:
rating_keys.extend(self.library.movie_map[mm])
@@ -2453,7 +2496,7 @@ class CollectionBuilder:
logger.error(e)
continue
if self.details["show_missing"] is True:
current_title = f"{movie.title} ({util.validate_date(movie.release_date, 'test').year})" if movie.release_date else movie.title
current_title = f"{movie.title} ({movie.release_date.year})" if movie.release_date else movie.title
logger.info(f"{name} {self.Type} | ? | {current_title} (TMDb: {missing_id})")
logger.info("")
logger.info(f"{len(self.run_again_movies)} Movie{'s' if len(self.run_again_movies) > 1 else ''} Missing")
@@ -2471,4 +2514,4 @@ class CollectionBuilder:
logger.info(f"{name} {self.Type} | ? | {title} (TVDb: {missing_id})")
logger.info(f"{len(self.run_again_shows)} Show{'s' if len(self.run_again_shows) > 1 else ''} Missing")
return amount_added
return amount_added
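The `tmdb_genre` filter added to `check_tmdb_filter` in the CollectionBuilder changes above passes an item when its TMDb genres overlap the configured list, and the `.not` modifier inverts that. A minimal standalone sketch of the overlap check, with hypothetical genre names (`item_genres` stands in for `[g.name for g in item.genres]`):

```python
def tmdb_genre_passes(item_genres, wanted, modifier=""):
    # Mirrors the new filter: fail on no overlap, or on any overlap when ".not" is used.
    overlap = set(wanted) & set(item_genres)
    return not overlap if modifier == ".not" else bool(overlap)

print(tmdb_genre_passes(["Action", "Comedy"], ["Comedy", "Drama"]))          # True
print(tmdb_genre_passes(["Action", "Comedy"], ["Comedy"], modifier=".not"))  # False
```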

@@ -54,6 +54,7 @@ class ConfigFile:
self.run_hour = datetime.strptime(attrs["time"], "%H:%M").hour
self.requested_collections = util.get_list(attrs["collections"]) if "collections" in attrs else None
self.requested_libraries = util.get_list(attrs["libraries"]) if "libraries" in attrs else None
self.requested_metadata_files = util.get_list(attrs["metadata_files"]) if "metadata_files" in attrs else None
self.resume_from = attrs["resume"] if "resume" in attrs else None
yaml.YAML().allow_duplicate_keys = True
@@ -126,6 +127,8 @@ class ConfigFile:
temp = new_config.pop("settings")
if "collection_minimum" in temp:
temp["minimum_items"] = temp.pop("collection_minimum")
if "playlist_sync_to_user" in temp:
temp["playlist_sync_to_users"] = temp.pop("playlist_sync_to_user")
new_config["settings"] = temp
if "webhooks" in new_config:
temp = new_config.pop("webhooks")
@@ -266,9 +269,12 @@ class ConfigFile:
"dimensional_asset_rename": check_for_attribute(self.data, "dimensional_asset_rename", parent="settings", var_type="bool", default=False),
"download_url_assets": check_for_attribute(self.data, "download_url_assets", parent="settings", var_type="bool", default=False),
"show_missing_season_assets": check_for_attribute(self.data, "show_missing_season_assets", parent="settings", var_type="bool", default=False),
"show_missing_episode_assets": check_for_attribute(self.data, "show_missing_episode_assets", parent="settings", var_type="bool", default=False),
"show_asset_not_needed": check_for_attribute(self.data, "show_asset_not_needed", parent="settings", var_type="bool", default=True),
"sync_mode": check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes),
"default_collection_order": check_for_attribute(self.data, "default_collection_order", parent="settings", default_is_none=True),
"minimum_items": check_for_attribute(self.data, "minimum_items", parent="settings", var_type="int", default=1),
"item_refresh_delay": check_for_attribute(self.data, "item_refresh_delay", parent="settings", var_type="int", default=0),
"delete_below_minimum": check_for_attribute(self.data, "delete_below_minimum", parent="settings", var_type="bool", default=False),
"delete_not_scheduled": check_for_attribute(self.data, "delete_not_scheduled", parent="settings", var_type="bool", default=False),
"run_again_delay": check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0),
@@ -283,10 +289,12 @@ class ConfigFile:
"tvdb_language": check_for_attribute(self.data, "tvdb_language", parent="settings", default="default"),
"ignore_ids": check_for_attribute(self.data, "ignore_ids", parent="settings", var_type="int_list", default_is_none=True),
"ignore_imdb_ids": check_for_attribute(self.data, "ignore_imdb_ids", parent="settings", var_type="list", default_is_none=True),
"playlist_sync_to_user": check_for_attribute(self.data, "playlist_sync_to_user", parent="settings", default="all", default_is_none=True),
"playlist_sync_to_users": check_for_attribute(self.data, "playlist_sync_to_users", parent="settings", default="all", default_is_none=True),
"verify_ssl": check_for_attribute(self.data, "verify_ssl", parent="settings", var_type="bool", default=True),
"custom_repo": check_for_attribute(self.data, "custom_repo", parent="settings", default_is_none=True),
"assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False, save=False, do_print=False)
}
self.custom_repo = self.general["custom_repo"]
self.session = requests.Session()
if not self.general["verify_ssl"]:
@@ -412,7 +420,7 @@ class ConfigFile:
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"My Anime List Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}")
logger.info(f"AniDB Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}")
if self.AniDB is None:
self.AniDB = AniDB(self, None)
@@ -443,6 +451,9 @@ class ConfigFile:
git = check_dict("git")
if git:
playlists_pairs.append(("Git", git))
repo = check_dict("repo")
if repo:
playlists_pairs.append(("Repo", repo))
file = check_dict("file")
if file:
playlists_pairs.append(("File", file))
@@ -575,7 +586,10 @@ class ConfigFile:
params["dimensional_asset_rename"] = check_for_attribute(lib, "dimensional_asset_rename", parent="settings", var_type="bool", default=self.general["dimensional_asset_rename"], do_print=False, save=False)
params["download_url_assets"] = check_for_attribute(lib, "download_url_assets", parent="settings", var_type="bool", default=self.general["download_url_assets"], do_print=False, save=False)
params["show_missing_season_assets"] = check_for_attribute(lib, "show_missing_season_assets", parent="settings", var_type="bool", default=self.general["show_missing_season_assets"], do_print=False, save=False)
params["show_missing_episode_assets"] = check_for_attribute(lib, "show_missing_episode_assets", parent="settings", var_type="bool", default=self.general["show_missing_episode_assets"], do_print=False, save=False)
params["show_asset_not_needed"] = check_for_attribute(lib, "show_asset_not_needed", parent="settings", var_type="bool", default=self.general["show_asset_not_needed"], do_print=False, save=False)
params["minimum_items"] = check_for_attribute(lib, "minimum_items", parent="settings", var_type="int", default=self.general["minimum_items"], do_print=False, save=False)
params["item_refresh_delay"] = check_for_attribute(lib, "item_refresh_delay", parent="settings", var_type="int", default=self.general["item_refresh_delay"], do_print=False, save=False)
params["delete_below_minimum"] = check_for_attribute(lib, "delete_below_minimum", parent="settings", var_type="bool", default=self.general["delete_below_minimum"], do_print=False, save=False)
params["delete_not_scheduled"] = check_for_attribute(lib, "delete_not_scheduled", parent="settings", var_type="bool", default=self.general["delete_not_scheduled"], do_print=False, save=False)
params["delete_unmanaged_collections"] = check_for_attribute(lib, "delete_unmanaged_collections", parent="settings", var_type="bool", default=False, do_print=False, save=False)
@@ -653,8 +667,14 @@ class ConfigFile:
if lib["operations"]["genre_mapper"] and isinstance(lib["operations"]["genre_mapper"], dict):
params["genre_mapper"] = {}
for new_genre, old_genres in lib["operations"]["genre_mapper"].items():
for old_genre in util.get_list(old_genres, split=False):
params["genre_mapper"][old_genre] = new_genre
if old_genres is None:
params["genre_mapper"][new_genre] = old_genres
else:
for old_genre in util.get_list(old_genres):
if old_genre == new_genre:
logger.error("Config Error: genres cannot be mapped to themselves")
else:
params["genre_mapper"][old_genre] = new_genre
else:
logger.error("Config Error: genre_mapper is blank")
if "genre_collections" in lib["operations"]:
@@ -719,6 +739,7 @@ class ConfigFile:
params["metadata_path"].append((name, path[attr]))
check_dict("url", "URL")
check_dict("git", "Git")
check_dict("repo", "Repo")
check_dict("file", "File")
check_dict("folder", "Folder")
else:
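The reworked `genre_mapper` handling in the ConfigFile changes above builds a lookup keyed by the genre being replaced, rejects self-mappings, and now keeps a blank value so a genre name can map to `None`. A rough illustration with a hypothetical operations block (the comma split stands in for `util.get_list`):

```python
operations = {"genre_mapper": {"Sci-Fi": "Science Fiction", "Anime": None}}

genre_mapper = {}
for new_genre, old_genres in operations["genre_mapper"].items():
    if old_genres is None:
        genre_mapper[new_genre] = None  # blank mapping is stored as-is
    else:
        for old_genre in [g.strip() for g in str(old_genres).split(",")]:
            if old_genre != new_genre:  # self-mappings are logged as errors and skipped
                genre_mapper[old_genre] = new_genre

print(genre_mapper)  # {'Science Fiction': 'Sci-Fi', 'Anime': None}
```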

@@ -75,9 +75,9 @@ class Convert:
elif anidb_id in self.anidb_to_tvdb:
ids.append((self.anidb_to_tvdb[anidb_id], "tvdb"))
elif anidb_id in self.anidb_ids:
logger.error(f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
logger.warning(f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
else:
logger.error(f"Convert Error: AniDB ID: {anidb_id} not found")
logger.warning(f"Convert Error: AniDB ID: {anidb_id} not found")
return ids
def anilist_to_ids(self, anilist_ids, library):
@@ -86,7 +86,7 @@ class Convert:
if anilist_id in self.anilist_to_anidb:
anidb_ids.append(self.anilist_to_anidb[anilist_id])
else:
logger.error(f"Convert Error: AniDB ID not found for AniList ID: {anilist_id}")
logger.warning(f"Convert Error: AniDB ID not found for AniList ID: {anilist_id}")
return self.anidb_to_ids(anidb_ids, library)
def myanimelist_to_ids(self, mal_ids, library):
@@ -97,7 +97,7 @@ class Convert:
elif int(mal_id) in self.mal_to_anidb:
ids.extend(self.anidb_to_ids(self.mal_to_anidb[int(mal_id)], library))
else:
logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {mal_id}")
logger.warning(f"Convert Error: AniDB ID not found for MyAnimeList ID: {mal_id}")
return ids
def tmdb_to_imdb(self, tmdb_id, is_movie=True, fail=False):

@@ -1,4 +1,4 @@
import logging, os, requests, shutil, time
import logging, os, shutil, time
from abc import ABC, abstractmethod
from modules import util
from modules.meta import MetadataFile
@@ -46,9 +46,12 @@ class Library(ABC):
self.dimensional_asset_rename = params["dimensional_asset_rename"]
self.download_url_assets = params["download_url_assets"]
self.show_missing_season_assets = params["show_missing_season_assets"]
self.show_missing_episode_assets = params["show_missing_episode_assets"]
self.show_asset_not_needed = params["show_asset_not_needed"]
self.sync_mode = params["sync_mode"]
self.default_collection_order = params["default_collection_order"]
self.minimum_items = params["minimum_items"]
self.item_refresh_delay = params["item_refresh_delay"]
self.delete_below_minimum = params["delete_below_minimum"]
self.delete_not_scheduled = params["delete_not_scheduled"]
self.missing_only_released = params["missing_only_released"]
@@ -85,12 +88,12 @@ class Library(ABC):
self.stats = {"created": 0, "modified": 0, "deleted": 0, "added": 0, "unchanged": 0, "removed": 0, "radarr": 0, "sonarr": 0}
self.status = {}
self.tmdb_library_operation = self.assets_for_all or self.mass_genre_update or self.mass_audience_rating_update \
or self.mass_critic_rating_update or self.mass_trakt_rating_update \
self.items_library_operation = self.assets_for_all or self.mass_genre_update or self.mass_audience_rating_update \
or self.mass_critic_rating_update or self.mass_trakt_rating_update or self.genre_mapper \
or self.tmdb_collections or self.radarr_add_all_existing or self.sonarr_add_all_existing
self.library_operation = self.tmdb_library_operation or self.delete_unmanaged_collections or self.delete_collections_with_less \
self.library_operation = self.items_library_operation or self.delete_unmanaged_collections or self.delete_collections_with_less \
or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.mass_collection_mode \
or self.genre_collections or self.genre_mapper or self.show_unmanaged
or self.genre_collections or self.show_unmanaged
metadata = []
for file_type, metadata_file in self.metadata_path:
if file_type == "Folder":
@@ -143,7 +146,7 @@ class Library(ABC):
self._upload_image(item, poster)
poster_uploaded = True
logger.info(f"Detail: {poster.attribute} updated {poster.message}")
else:
elif self.show_asset_not_needed:
logger.info(f"Detail: {poster.prefix}poster update not needed")
except Failed:
util.print_stacktrace()
@@ -193,7 +196,7 @@ class Library(ABC):
self._upload_image(item, background)
background_uploaded = True
logger.info(f"Detail: {background.attribute} updated {background.message}")
else:
elif self.show_asset_not_needed:
logger.info(f"Detail: {background.prefix}background update not needed")
except Failed:
util.print_stacktrace()

@@ -1,19 +1,20 @@
import logging
from modules import util
from modules.util import Failed
from urllib.parse import urlparse
logger = logging.getLogger("Plex Meta Manager")
builders = ["mdblist_list"]
base_url = "https://mdblist.com/lists"
headers = { 'User-Agent': 'Plex-Meta-Manager' }
headers = {"User-Agent": "Plex-Meta-Manager"}
class Mdblist:
def __init__(self, config):
self.config = config
def validate_mdb_lists(self, mdb_lists, language):
def validate_mdblist_lists(self, mdb_lists):
valid_lists = []
for mdb_dict in util.get_list(mdb_lists, split=False):
if not isinstance(mdb_dict, dict):
@@ -49,7 +50,9 @@ class Mdblist:
if method == "mdblist_list":
limit_status = f" Limit at: {data['limit']} items" if data['limit'] > 0 else ''
logger.info(f"Processing Mdblist.com List: {data['url']}{limit_status}")
url = f"{data['url']}?limit={data['limit']}"
return [(i["imdb_id"], "imdb") for i in self.config.get_json(url,headers=headers)]
parsed_url = urlparse(data["url"])
url_base = parsed_url._replace(query=None).geturl()
params = {"limit": data["limit"]} if data["limit"] > 0 else None
return [(i["imdb_id"], "imdb") for i in self.config.get_json(url_base, headers=headers, params=params)]
else:
raise Failed(f"Mdblist Error: Method {method} not supported")
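The rewritten `mdblist_list` handler above now normalizes the configured URL with `urlparse`, dropping any query string a user may have pasted, and only sends `limit` as a request parameter when it is positive. A standalone sketch of that URL handling (the example URL and limit are hypothetical):

```python
from urllib.parse import urlparse

def mdblist_request_args(url, limit):
    # Strip the query string from the configured list URL; pass limit separately if set.
    url_base = urlparse(url)._replace(query=None).geturl()
    params = {"limit": limit} if limit > 0 else None
    return url_base, params

print(mdblist_request_args("https://mdblist.com/lists/someuser/some-list?limit=25", 50))
# ('https://mdblist.com/lists/someuser/some-list', {'limit': 50})
```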

@@ -31,18 +31,18 @@ def get_dict(attribute, attr_data, check_list=None):
new_dict = {}
for _name, _data in attr_data[attribute].items():
if _name in check_list:
logger.error(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name}")
logger.warning(f"Config Warning: Skipping duplicate {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name}")
elif _data is None:
logger.error(f"Config Warning: {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name} has no data")
logger.error(f"Config Error: {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name} has no data")
elif not isinstance(_data, dict):
logger.error(f"Config Warning: {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name} must be a dictionary")
logger.error(f"Config Error: {attribute[:-1] if attribute[-1] == 's' else attribute}: {_name} must be a dictionary")
else:
new_dict[str(_name)] = _data
return new_dict
else:
logger.warning(f"Config Warning: {attribute} must be a dictionary")
logger.error(f"Config Error: {attribute} must be a dictionary")
else:
logger.warning(f"Config Warning: {attribute} attribute is blank")
logger.error(f"Config Error: {attribute} attribute is blank")
return None
@@ -54,10 +54,21 @@ class DataFile:
self.data_type = ""
self.templates = {}
def get_file_name(self):
data = f"{github_base}{self.path}.yml" if self.type == "GIT" else self.path
if "/" in data:
return data[data.rfind("/") + 1:-4]
elif "\\" in data:
return data[data.rfind("\\") + 1:-4]
else:
return data
def load_file(self):
try:
if self.type in ["URL", "Git"]:
content_path = self.path if self.type == "URL" else f"{github_base}{self.path}.yml"
if self.type in ["URL", "Git", "Repo"]:
if self.type == "Repo" and not self.config.custom_repo:
raise Failed("Config Error: No custom_repo defined")
content_path = self.path if self.type == "URL" else f"{self.config.custom_repo if self.type == 'Repo' else github_base}{self.path}.yml"
response = self.config.get(content_path)
if response.status_code >= 400:
raise Failed(f"URL Error: No file found at {content_path}")
@@ -227,11 +238,11 @@ class MetadataFile(DataFile):
logger.info("")
logger.info(f"Loading Metadata {file_type}: {path}")
data = self.load_file()
self.metadata = get_dict("metadata", data, library.metadatas)
self.metadata = get_dict("metadata", data, library.metadata_files)
self.templates = get_dict("templates", data)
self.collections = get_dict("collections", data, library.collections)
if self.metadata is None and self.collections is None:
if not self.metadata and not self.collections:
raise Failed("YAML Error: metadata or collections attribute is required")
logger.info(f"Metadata File Loaded Successfully")
@@ -294,7 +305,6 @@ class MetadataFile(DataFile):
updated = False
edits = {}
advance_edits = {}
def add_edit(name, current_item, group, alias, key=None, value=None, var_type="str"):
if value or name in alias:
@@ -334,21 +344,6 @@
else:
logger.error(f"Metadata Error: {name} attribute is blank")
def add_advanced_edit(attr, obj, group, alias, new_agent=False):
key, options = plex.item_advance_keys[f"item_{attr}"]
if attr in alias:
if new_agent and self.library.agent not in plex.new_plex_agents:
logger.error(f"Metadata Error: {attr} attribute only works for with the New Plex Movie Agent and New Plex TV Agent")
elif group[alias[attr]]:
method_data = str(group[alias[attr]]).lower()
if method_data not in options:
logger.error(f"Metadata Error: {group[alias[attr]]} {attr} attribute invalid")
elif getattr(obj, key) != options[method_data]:
advance_edits[key] = options[method_data]
logger.info(f"Detail: {attr} updated to {method_data}")
else:
logger.error(f"Metadata Error: {attr} attribute is blank")
logger.info("")
util.separator()
logger.info("")
@@ -423,15 +418,15 @@ class MetadataFile(DataFile):
summary = None
genres = []
if tmdb_item:
originally_available = tmdb_item.release_date if tmdb_is_movie else tmdb_item.first_air_date
if tmdb_item and tmdb_is_movie is True and tmdb_item.original_title != tmdb_item.title:
originally_available = datetime.strftime(tmdb_item.release_date if tmdb_is_movie else tmdb_item.first_air_date, "%Y-%m-%d")
if tmdb_is_movie and tmdb_item.original_title != tmdb_item.title:
original_title = tmdb_item.original_title
elif tmdb_item and tmdb_is_movie is False and tmdb_item.original_name != tmdb_item.name:
elif not tmdb_is_movie and tmdb_item.original_name != tmdb_item.name:
original_title = tmdb_item.original_name
rating = tmdb_item.vote_average
if tmdb_is_movie is True and tmdb_item.production_companies:
if tmdb_is_movie and tmdb_item.production_companies:
studio = tmdb_item.production_companies[0].name
elif tmdb_is_movie is False and tmdb_item.networks:
elif not tmdb_is_movie and tmdb_item.networks:
studio = tmdb_item.networks[0].name
tagline = tmdb_item.tagline if len(tmdb_item.tagline) > 0 else None
summary = tmdb_item.overview
@@ -454,9 +449,21 @@ class MetadataFile(DataFile):
updated = True
advance_edits = {}
prefs = [p.id for p in item.preferences()]
for advance_edit in advance_tags_to_edit[self.library.type]:
is_new_agent = advance_edit in ["metadata_language", "use_original_title"]
add_advanced_edit(advance_edit, item, meta, methods, new_agent=is_new_agent)
key, options = plex.item_advance_keys[f"item_{advance_edit}"]
if advance_edit in methods:
if advance_edit in ["metadata_language", "use_original_title"] and self.library.agent not in plex.new_plex_agents:
logger.error(f"Metadata Error: {advance_edit} attribute only works for with the New Plex Movie Agent and New Plex TV Agent")
elif meta[methods[advance_edit]]:
method_data = str(meta[methods[advance_edit]]).lower()
if method_data not in options:
logger.error(f"Metadata Error: {meta[methods[advance_edit]]} {advance_edit} attribute invalid")
elif key in prefs and getattr(item, key) != options[method_data]:
advance_edits[key] = options[method_data]
logger.info(f"Detail: {advance_edit} updated to {method_data}")
else:
logger.error(f"Metadata Error: {advance_edit} attribute is blank")
if self.library.edit_item(item, mapping_name, self.library.type, advance_edits, advanced=True):
updated = True
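Editor's note: the inlined loop above only queues an advanced edit when the preference id actually exists on the item (key in prefs) and the current value differs from the requested one. A stripped-down sketch of that guard with purely illustrative attribute names and option values (the real mapping lives in plex.item_advance_keys):

# Illustrative stand-ins; not the real PMM attribute names or Plex preference values.
item_advance_keys = {"item_example_setting": ("examplePref", {"optiona": 0, "optionb": 1})}
prefs = ["examplePref"]                 # ids collected from item.preferences()
current_values = {"examplePref": 1}     # pretend getattr(item, key) results

advance_edits = {}
key, options = item_advance_keys["item_example_setting"]
requested = "optiona"                   # user value, lower-cased like method_data
if requested in options and key in prefs and current_values[key] != options[requested]:
    advance_edits[key] = options[requested]
print(advance_edits)                    # {'examplePref': 0}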
@@ -474,16 +481,17 @@ class MetadataFile(DataFile):
elif not isinstance(meta[methods["seasons"]], dict):
logger.error("Metadata Error: seasons attribute must be a dictionary")
else:
seasons = {}
for season in item.seasons():
seasons[season.title] = season
seasons[int(season.index)] = season
for season_id, season_dict in meta[methods["seasons"]].items():
updated = False
logger.info("")
logger.info(f"Updating season {season_id} of {mapping_name}...")
try:
if isinstance(season_id, int):
season = item.season(season=season_id)
else:
season = item.season(title=season_id)
except NotFound:
if season_id in seasons:
season = seasons[season_id]
else:
logger.error(f"Metadata Error: Season: {season_id} not found")
continue
season_methods = {sm.lower(): sm for sm in season_dict}
@@ -516,16 +524,17 @@ class MetadataFile(DataFile):
elif not isinstance(season_dict[season_methods["episodes"]], dict):
logger.error("Metadata Error: episodes attribute must be a dictionary")
else:
episodes = {}
for episode in season.episodes():
episodes[episode.title] = episode
episodes[int(episode.index)] = episode
for episode_str, episode_dict in season_dict[season_methods["episodes"]].items():
updated = False
logger.info("")
logger.info(f"Updating episode {episode_str} in {season_id} of {mapping_name}...")
try:
if isinstance(episode_str, int):
episode = season.episode(episode=episode_str)
else:
episode = season.episode(title=episode_str)
except NotFound:
if episode_str in episodes:
episode = episodes[episode_str]
else:
logger.error(f"Metadata Error: Episode {episode_str} in Season {season_id} not found")
continue
episode_methods = {em.lower(): em for em in episode_dict}
@@ -614,24 +623,21 @@ class MetadataFile(DataFile):
elif not isinstance(meta[methods["albums"]], dict):
logger.error("Metadata Error: albums attribute must be a dictionary")
else:
albums = {album.title: album for album in item.albums()}
for album_name, album_dict in meta[methods["albums"]].items():
updated = False
title = None
album_methods = {am.lower(): am for am in album_dict}
logger.info("")
logger.info(f"Updating album {album_name} of {mapping_name}...")
try:
album = item.album(album_name)
except NotFound:
try:
if "alt_title" not in album_methods or not album_dict[album_methods["alt_title"]]:
raise NotFound
album = item.album(album_dict[album_methods["alt_title"]])
title = album_name
except NotFound:
logger.error(f"Metadata Error: Album: {album_name} not found")
continue
if album_name in albums:
album = albums[album_name]
elif "alt_title" in album_methods and album_dict[album_methods["alt_title"]] and album_dict[album_methods["alt_title"]] in albums:
album = albums[album_dict[album_methods["alt_title"]]]
title = album_name
else:
logger.error(f"Metadata Error: Album: {album_name} not found")
continue
if not title:
title = album.title
edits = {}
@@ -655,26 +661,24 @@ class MetadataFile(DataFile):
elif not isinstance(album_dict[album_methods["tracks"]], dict):
logger.error("Metadata Error: tracks attribute must be a dictionary")
else:
tracks = {}
for track in album.tracks():
tracks[track.title] = track
tracks[int(track.index)] = track
for track_num, track_dict in album_dict[album_methods["tracks"]].items():
updated = False
title = None
track_methods = {tm.lower(): tm for tm in track_dict}
logger.info("")
logger.info(f"Updating track {track_num} on {album_name} of {mapping_name}...")
try:
if isinstance(track_num, int):
track = album.track(track=track_num)
else:
track = album.track(title=track_num)
except NotFound:
try:
if "alt_title" not in track_methods or not track_dict[track_methods["alt_title"]]:
raise NotFound
track = album.track(title=track_dict[track_methods["alt_title"]])
title = track_num
except NotFound:
logger.error(f"Metadata Error: Track: {track_num} not found")
continue
if track_num in tracks:
track = tracks[track_num]
elif "alt_title" in track_methods and track_dict[track_methods["alt_title"]] and track_dict[track_methods["alt_title"]] in tracks:
track = tracks[track_dict[track_methods["alt_title"]]]
title = track_num
else:
logger.error(f"Metadata Error: Track: {track_num} not found")
continue
if not title:
title = track.title
@@ -684,7 +688,7 @@ class MetadataFile(DataFile):
add_edit("track", track, track_dict, track_methods, key="index", var_type="int")
add_edit("disc", track, track_dict, track_methods, key="parentIndex", var_type="int")
add_edit("original_artist", track, track_dict, track_methods, key="originalTitle")
if self.library.edit_item(album, title, "Track", edits):
if self.library.edit_item(track, title, "Track", edits):
updated = True
if self.edit_tags("mood", track, track_dict, track_methods):
updated = True

View file

@@ -4,6 +4,7 @@ from modules.library import Library
from modules.util import Failed, ImageData
from PIL import Image
from plexapi import utils
from plexapi.audio import Artist
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
from plexapi.collection import Collection
from plexapi.playlist import Playlist
@@ -15,7 +16,7 @@ from xml.etree.ElementTree import ParseError
logger = logging.getLogger("Plex Meta Manager")
builders = ["plex_all", "plex_collectionless", "plex_search"]
builders = ["plex_all", "plex_pilots", "plex_collectionless", "plex_search"]
search_translation = {
"episode_title": "episode.title",
"network": "show.network",
@@ -245,7 +246,7 @@ show_only_searches = [
]
string_attributes = ["title", "studio", "episode_title", "artist_title", "album_title", "album_record_label", "track_title"]
float_attributes = [
"user_rating", "episode_user_rating", "critic_rating", "audience_rating",
"user_rating", "episode_user_rating", "critic_rating", "audience_rating", "duration",
"artist_user_rating", "album_user_rating", "album_critic_rating", "track_user_rating"
]
boolean_attributes = [
@@ -259,7 +260,7 @@ date_attributes = [
"album_added", "album_released", "track_last_played", "track_last_skipped", "track_last_rated", "track_added"
]
year_attributes = ["decade", "year", "episode_year", "album_year", "album_decade"]
number_attributes = ["plays", "episode_plays", "duration", "tmdb_vote_count", "album_plays", "track_plays", "track_skips"] + year_attributes
number_attributes = ["plays", "episode_plays", "tmdb_vote_count", "album_plays", "track_plays", "track_skips"] + year_attributes
search_display = {"added": "Date Added", "release": "Release Date", "hdr": "HDR", "progress": "In Progress", "episode_progress": "Episode In Progress"}
tag_attributes = [
"actor", "audio_language", "collection", "content_rating", "country", "director", "genre", "label", "network",
@@ -552,15 +553,16 @@ class Plex(Library):
try:
names = []
choices = {}
use_title = title and final_search not in ["contentRating", "audioLanguage", "subtitleLanguage", "resolution"]
for choice in self.Plex.listFilterChoices(final_search):
if choice.title not in names:
names.append(choice.title)
if choice.key not in names:
names.append(choice.key)
choices[choice.title] = choice.title if title else choice.key
choices[choice.key] = choice.title if title else choice.key
choices[choice.title.lower()] = choice.title if title else choice.key
choices[choice.key.lower()] = choice.title if title else choice.key
choices[choice.title] = choice.title if use_title else choice.key
choices[choice.key] = choice.title if use_title else choice.key
choices[choice.title.lower()] = choice.title if use_title else choice.key
choices[choice.key.lower()] = choice.title if use_title else choice.key
return choices, names
except NotFound:
logger.debug(f"Search Attribute: {final_search}")
@@ -699,6 +701,14 @@ class Plex(Library):
if method == "plex_all":
logger.info(f"Processing Plex All {data.capitalize()}s")
items = self.get_all(collection_level=data)
elif method == "plex_pilots":
logger.info(f"Processing Plex Pilot {data.capitalize()}s")
items = []
for item in self.get_all():
try:
items.append(item.episode(season=1, episode=1))
except NotFound:
logger.warning(f"Plex Warning: {item.title} has no Season 1 Episode 1 ")
elif method == "plex_search":
util.print_multiline(data[1], info=True)
items = self.get_filter_items(data[2])
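Editor's note: the new plex_pilots builder collects each show's Season 1 Episode 1. A hedged sketch of the same idea with plexapi used directly, assuming a reachable server, a token, and a TV library named "TV Shows" (all placeholders):

from plexapi.server import PlexServer
from plexapi.exceptions import NotFound

plex = PlexServer("http://localhost:32400", "PLEX_TOKEN")     # placeholder URL and token
pilots = []
for show in plex.library.section("TV Shows").all():
    try:
        # Grab the pilot; shows without a S01E01 are skipped with a warning.
        pilots.append(show.episode(season=1, episode=1))
    except NotFound:
        print(f"{show.title} has no Season 1 Episode 1")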
@@ -826,9 +836,9 @@ class Plex(Library):
def find_assets(self, item, name=None, upload=True, overlay=None, folders=None, create=None):
if isinstance(item, Movie):
name = os.path.basename(os.path.dirname(str(item.locations[0])))
elif isinstance(item, Show):
elif isinstance(item, (Artist, Show)):
name = os.path.basename(str(item.locations[0]))
elif isinstance(item, Collection):
elif isinstance(item, (Collection, Playlist)):
name = name if name else item.title
else:
return None, None, None
@@ -894,7 +904,7 @@ class Plex(Library):
return poster, background, item_dir
if isinstance(item, Show):
missing_assets = ""
found_season = False
found_image = False
for season in self.query(item.seasons):
season_name = f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}"
if item_dir:
@ -908,8 +918,8 @@ class Plex(Library):
matches = util.glob_filter(season_poster_filter)
if len(matches) > 0:
season_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Season {season.seasonNumber}'s ", is_url=False)
found_season = True
elif season.seasonNumber > 0:
found_image = True
elif self.show_missing_season_assets and season.seasonNumber > 0:
missing_assets += f"\nMissing Season {season.seasonNumber} Poster"
matches = util.glob_filter(season_background_filter)
if len(matches) > 0:
@@ -924,9 +934,38 @@ class Plex(Library):
matches = util.glob_filter(episode_filter)
if len(matches) > 0:
episode_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} {episode.seasonEpisode.upper()}'s ", is_url=False)
found_image = True
self.upload_images(episode, poster=episode_poster)
if self.show_missing_season_assets and found_season and missing_assets:
util.print_multiline(f"Missing Season Posters for {item.title}{missing_assets}", info=True)
elif self.show_missing_episode_assets:
missing_assets += f"\nMissing {episode.seasonEpisode.upper()} Title Card"
if found_image and missing_assets:
util.print_multiline(f"Missing Posters for {item.title}{missing_assets}", info=True)
if isinstance(item, Artist):
missing_assets = ""
found_album = False
for album in self.query(item.albums):
if item_dir:
album_poster_filter = os.path.join(item_dir, f"{album.title}.*")
album_background_filter = os.path.join(item_dir, f"{album.title}_background.*")
else:
album_poster_filter = os.path.join(ad, f"{name}_{album.title}.*")
album_background_filter = os.path.join(ad, f"{name}_{album.title}_background.*")
album_poster = None
album_background = None
matches = util.glob_filter(album_poster_filter)
if len(matches) > 0:
album_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Album {album.title}'s ", is_url=False)
found_album = True
else:
missing_assets += f"\nMissing Album {album.title} Poster"
matches = util.glob_filter(album_background_filter)
if len(matches) > 0:
album_background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Album {album.title}'s ", is_poster=False, is_url=False)
if album_poster or album_background:
self.upload_images(album, poster=album_poster, background=album_background)
if self.show_missing_season_assets and found_album and missing_assets:
util.print_multiline(f"Missing Album Posters for {item.title}{missing_assets}", info=True)
if isinstance(item, (Movie, Show)) and not poster and overlay:
self.upload_images(item, overlay=overlay)
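Editor's note: the new artist branch above looks for album artwork alongside the other assets. A sketch of the filename patterns it builds, with illustrative directory, artist-folder, and album names:

import os

ad = "/config/assets"        # one of the library's asset_directory entries (illustrative)
name = "Aerosmith"           # basename of the artist's folder on disk (illustrative)
album_title = "Pump"

# Flat layout: everything for the artist sits directly in the asset directory.
flat_poster = os.path.join(ad, f"{name}_{album_title}.*")                 # /config/assets/Aerosmith_Pump.*
flat_background = os.path.join(ad, f"{name}_{album_title}_background.*")  # /config/assets/Aerosmith_Pump_background.*

# Per-item layout: the artist has its own asset folder, so album files drop the prefix.
item_dir = os.path.join(ad, name)
dir_poster = os.path.join(item_dir, f"{album_title}.*")                   # /config/assets/Aerosmith/Pump.*
dir_background = os.path.join(item_dir, f"{album_title}_background.*")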

View file

@@ -170,7 +170,7 @@ class Radarr:
logger.info(f"Invalid Root Folder for TMDb ID | {tmdb_id:<7} | {path}")
logger.info(f"{len(invalid_root)} Movie{'s' if len(invalid_root) > 1 else ''} with Invalid Paths")
return len(added)
return added
def edit_tags(self, tmdb_ids, tags, apply_tags):
logger.info("")

View file

@@ -196,7 +196,7 @@ class Sonarr:
logger.info(f"Invalid Root Folder for TVDb ID | {tvdb_id:<7} | {path}")
logger.info(f"{len(invalid_root)} Series with Invalid Paths")
return len(added)
return added
def edit_tags(self, tvdb_ids, tags, apply_tags):
logger.info("")

View file

@@ -1,8 +1,7 @@
import logging, tmdbv3api
import logging
from modules import util
from modules.util import Failed
from retrying import retry
from tmdbv3api.exceptions import TMDbException
from tmdbapis import TMDbAPIs, TMDbException, NotFound
logger = logging.getLogger("Plex Meta Manager")
@@ -62,61 +61,42 @@ discover_monetization_types = ["flatrate", "free", "ads", "rent", "buy"]
class TMDb:
def __init__(self, config, params):
self.config = config
self.TMDb = tmdbv3api.TMDb(session=self.config.session)
self.TMDb.api_key = params["apikey"]
self.TMDb.language = params["language"]
try:
response = tmdbv3api.Configuration().info()
if hasattr(response, "status_message"):
raise Failed(f"TMDb Error: {response.status_message}")
except TMDbException as e:
raise Failed(f"TMDb Error: {e}")
self.apikey = params["apikey"]
self.language = params["language"]
self.Movie = tmdbv3api.Movie()
self.TV = tmdbv3api.TV()
self.Discover = tmdbv3api.Discover()
self.Trending = tmdbv3api.Trending()
self.Keyword = tmdbv3api.Keyword()
self.List = tmdbv3api.List()
self.Company = tmdbv3api.Company()
self.Network = tmdbv3api.Network()
self.Collection = tmdbv3api.Collection()
self.Person = tmdbv3api.Person()
self.image_url = "https://image.tmdb.org/t/p/original"
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_from(self, tmdb_id, convert_to, is_movie):
try:
id_to_return = self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to]
if not id_to_return or (convert_to == "tvdb_id" and id_to_return == 0):
raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}")
return id_to_return if convert_to == "imdb_id" else int(id_to_return)
except TMDbException:
raise Failed(f"TMDb Error: TMDb {'Movie' if is_movie else 'Show'} ID: {tmdb_id} not found")
self.TMDb = TMDbAPIs(self.apikey, language=self.language, session=self.config.session)
except TMDbException as e:
raise Failed(f"TMDb Error: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def convert_to(self, external_id, external_source):
return self.Movie.external(external_id=external_id, external_source=external_source)
def convert_from(self, tmdb_id, convert_to, is_movie):
item = self.get_movie(tmdb_id) if is_movie else self.get_show(tmdb_id)
check_id = item.tvdb_id if convert_to == "tvdb_id" and not is_movie else item.imdb_id
if not check_id:
raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}")
return check_id
def convert_tvdb_to(self, tvdb_id):
search = self.convert_to(tvdb_id, "tvdb_id")
if len(search["tv_results"]) == 1:
return int(search["tv_results"][0]["id"])
else:
raise Failed(f"TMDb Error: No TMDb ID found for TVDb ID {tvdb_id}")
try:
results = self.TMDb.find_by_id(tvdb_id=tvdb_id)
if results.tv_results:
return results.tv_results[0].id
except NotFound:
pass
raise Failed(f"TMDb Error: No TMDb ID found for TVDb ID {tvdb_id}")
def convert_imdb_to(self, imdb_id):
search = self.convert_to(imdb_id, "imdb_id")
if len(search["movie_results"]) > 0:
return int(search["movie_results"][0]["id"]), "movie"
elif len(search["tv_results"]) > 0:
return int(search["tv_results"][0]["id"]), "show"
elif len(search["tv_episode_results"]) > 0:
item = search['tv_episode_results'][0]
return f"{item['show_id']}_{item['season_number']}_{item['episode_number']}", "episode"
else:
raise Failed(f"TMDb Error: No TMDb ID found for IMDb ID {imdb_id}")
try:
results = self.TMDb.find_by_id(imdb_id=imdb_id)
if results.movie_results:
return results.movie_results[0].id, "movie"
elif results.tv_results:
return results.tv_results[0].id, "show"
elif results.tv_episode_results:
item = results.tv_episode_results[0]
return f"{item.tv_id}_{item.season_number}_{item.episode_number}", "episode"
except NotFound:
pass
raise Failed(f"TMDb Error: No TMDb ID found for IMDb ID {imdb_id}")
def get_movie_show_or_collection(self, tmdb_id, is_movie):
if is_movie:
@@ -126,112 +106,38 @@ class TMDb:
except Failed: raise Failed(f"TMDb Error: No Movie or Collection found for TMDb ID {tmdb_id}")
else: return self.get_show(tmdb_id)
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_movie(self, tmdb_id):
try: return self.Movie.details(tmdb_id)
try: return self.TMDb.movie(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Movie found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_show(self, tmdb_id):
try: return self.TV.details(tmdb_id)
try: return self.TMDb.tv_show(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Show found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_collection(self, tmdb_id):
try: return self.Collection.details(tmdb_id)
try: return self.TMDb.collection(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Collection found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_person(self, tmdb_id):
try: return self.Person.details(tmdb_id)
try: return self.TMDb.person(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def _person_credits(self, tmdb_id):
try: return self.Person.combined_credits(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def _company(self, tmdb_id):
try: return self.Company.details(tmdb_id)
try: return self.TMDb.company(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Company found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def _network(self, tmdb_id):
try: return self.Network.details(tmdb_id)
try: return self.TMDb.network(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Network found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def _keyword(self, tmdb_id):
try: return self.Keyword.details(tmdb_id)
try: return self.TMDb.keyword(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No Keyword found for TMDb ID {tmdb_id}: {e}")
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_list(self, tmdb_id):
try: return self.List.details(tmdb_id, all_details=True)
try: return self.TMDb.list(tmdb_id)
except TMDbException as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}")
def _credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False):
ids = []
actor_credits = self._person_credits(tmdb_id)
if actor:
for credit in actor_credits.cast:
if credit.media_type == "movie":
ids.append((credit.id, "tmdb"))
elif credit.media_type == "tv":
ids.append((credit.id, "tmdb_show"))
for credit in actor_credits.crew:
if crew or \
(director and credit.department == "Directing") or \
(producer and credit.department == "Production") or \
(writer and credit.department == "Writing"):
if credit.media_type == "movie":
ids.append((credit.id, "tmdb"))
elif credit.media_type == "tv":
ids.append((credit.id, "tmdb_show"))
return ids
def _pagenation(self, method, amount, is_movie):
ids = []
for x in range(int(amount / 20) + 1):
if method == "tmdb_popular": tmdb_items = self.Movie.popular(x + 1) if is_movie else self.TV.popular(x + 1)
elif method == "tmdb_top_rated": tmdb_items = self.Movie.top_rated(x + 1) if is_movie else self.TV.top_rated(x + 1)
elif method == "tmdb_now_playing" and is_movie: tmdb_items = self.Movie.now_playing(x + 1)
elif method == "tmdb_trending_daily": tmdb_items = self.Trending.movie_day(x + 1) if is_movie else self.Trending.tv_day(x + 1)
elif method == "tmdb_trending_weekly": tmdb_items = self.Trending.movie_week(x + 1) if is_movie else self.Trending.tv_week(x + 1)
else: raise Failed(f"TMDb Error: {method} method not supported")
for tmdb_item in tmdb_items:
try:
ids.append((tmdb_item.id, "tmdb" if is_movie else "tmdb_show"))
except Failed as e:
logger.error(e)
if len(ids) == amount: break
if len(ids) == amount: break
return ids
def _discover(self, attrs, amount, is_movie):
ids = []
for date_attr in discover_dates:
if date_attr in attrs:
attrs[date_attr] = util.validate_date(attrs[date_attr], f"tmdb_discover attribute {date_attr}", return_as="%Y-%m-%d")
if self.config.trace_mode:
logger.debug(f"Params: {attrs}")
self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
total_pages = int(self.TMDb.total_pages)
total_results = int(self.TMDb.total_results)
amount = total_results if amount == 0 or total_results < amount else amount
for x in range(total_pages):
attrs["page"] = x + 1
tmdb_items = self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
for tmdb_item in tmdb_items:
try:
ids.append((tmdb_item.id, "tmdb" if is_movie else "tmdb_show"))
except Failed as e:
logger.error(e)
if len(ids) == amount: break
if len(ids) == amount: break
return ids, amount
def validate_tmdb_ids(self, tmdb_ids, tmdb_method):
tmdb_list = util.get_int_list(tmdb_ids, f"TMDb {type_map[tmdb_method]} ID")
tmdb_values = []
@@ -249,74 +155,86 @@ class TMDb:
elif tmdb_type == "Person": self.get_person(tmdb_id)
elif tmdb_type == "Company": self._company(tmdb_id)
elif tmdb_type == "Network": self._network(tmdb_id)
elif tmdb_type == "Keyword": self._keyword(tmdb_id)
elif tmdb_type == "List": self.get_list(tmdb_id)
return tmdb_id
def get_tmdb_ids(self, method, data, is_movie):
pretty = method.replace("_", " ").title().replace("Tmdb", "TMDb")
media_type = "Movie" if is_movie else "Show"
result_type = "tmdb" if is_movie else "tmdb_show"
ids = []
if method in ["tmdb_discover", "tmdb_company", "tmdb_keyword"] or (method == "tmdb_network" and not is_movie):
attrs = None
tmdb_id = ""
tmdb_name = ""
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
tmdb_id = int(data)
if method == "tmdb_company":
tmdb_name = str(self._company(tmdb_id).name)
attrs = {"with_companies": tmdb_id}
elif method == "tmdb_network":
tmdb_name = str(self._network(tmdb_id).name)
attrs = {"with_networks": tmdb_id}
elif method == "tmdb_keyword":
tmdb_name = str(self._keyword(tmdb_id).name)
attrs = {"with_keywords": tmdb_id}
limit = 0
if method in ["tmdb_network", "tmdb_company", "tmdb_keyword"]:
if method == "tmdb_company":
item = self._company(int(data))
elif method == "tmdb_network":
item = self._network(int(data))
else:
attrs = data.copy()
limit = int(attrs.pop("limit"))
ids, amount = self._discover(attrs, limit, is_movie)
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})")
elif method == "tmdb_discover":
logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}")
for attr, value in attrs.items():
logger.info(f" {attr}: {value}")
item = self._keyword(int(data))
results = item.movies if is_movie else item.tv_shows
ids = [(i.id, result_type) for i in results.get_results(results.total_results)]
logger.info(f"Processing {pretty}: ({data}) {item.name} ({len(results)} {media_type}{'' if len(results) == 1 else 's'})")
elif method == "tmdb_discover":
attrs = data.copy()
limit = int(attrs.pop("limit"))
for date_attr in discover_dates:
if date_attr in attrs:
attrs[date_attr] = util.validate_date(attrs[date_attr], f"tmdb_discover attribute {date_attr}", return_as="%Y-%m-%d")
if self.config.trace_mode:
logger.debug(f"Params: {attrs}")
results = self.TMDb.discover_movies(**attrs) if is_movie else self.TMDb.discover_tv_shows(**attrs)
amount = results.total_results if limit == 0 or results.total_results < limit else limit
ids = [(i.id, result_type) for i in results.get_results(amount)]
logger.info(f"Processing {pretty}: {amount} {media_type}{'' if amount == 1 else 's'}")
for attr, value in attrs.items():
logger.info(f" {attr}: {value}")
elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]:
ids = self._pagenation(method, data, is_movie)
if method == "tmdb_popular":
results = self.TMDb.popular_movies() if is_movie else self.TMDb.popular_tv()
elif method == "tmdb_top_rated":
results = self.TMDb.top_rated_movies() if is_movie else self.TMDb.top_rated_tv()
elif method == "tmdb_now_playing":
results = self.TMDb.now_playing_movies()
else:
results = self.TMDb.trending("movie" if is_movie else "tv", "day" if method == "tmdb_trending_daily" else "week")
ids = [(i.id, result_type) for i in results.get_results(data)]
logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
else:
tmdb_id = int(data)
if method == "tmdb_list":
tmdb_list = self.get_list(tmdb_id)
tmdb_name = tmdb_list.name
for tmdb_item in tmdb_list.items:
if tmdb_item.media_type == "movie":
ids.append((tmdb_item.id, "tmdb"))
elif tmdb_item.media_type == "tv":
try:
ids.append((tmdb_item.id, "tmdb_show"))
except Failed:
pass
results = self.get_list(tmdb_id)
tmdb_name = results.name
ids = [(i.id, result_type) for i in results.get_results(results.total_results)]
elif method == "tmdb_movie":
tmdb_name = str(self.get_movie(tmdb_id).title)
tmdb_name = self.get_movie(tmdb_id).title
ids.append((tmdb_id, "tmdb"))
elif method == "tmdb_collection":
tmdb_items = self.get_collection(tmdb_id)
tmdb_name = str(tmdb_items.name)
for tmdb_item in tmdb_items.parts:
ids.append((tmdb_item["id"], "tmdb"))
collection = self.get_collection(tmdb_id)
tmdb_name = collection.name
ids = [(t.id, "tmdb") for t in collection.movies]
elif method == "tmdb_show":
tmdb_name = str(self.get_show(tmdb_id).name)
tmdb_name = self.get_show(tmdb_id).name
ids.append((tmdb_id, "tmdb_show"))
else:
tmdb_name = str(self.get_person(tmdb_id).name)
if method == "tmdb_actor": ids = self._credits(tmdb_id, actor=True)
elif method == "tmdb_director": ids = self._credits(tmdb_id, director=True)
elif method == "tmdb_producer": ids = self._credits(tmdb_id, producer=True)
elif method == "tmdb_writer": ids = self._credits(tmdb_id, writer=True)
elif method == "tmdb_crew": ids = self._credits(tmdb_id, crew=True)
else: raise Failed(f"TMDb Error: Method {method} not supported")
person = self.get_person(tmdb_id)
tmdb_name = person.name
if method == "tmdb_actor":
ids = [(i.movie.id, "tmdb") for i in person.movie_cast]
ids.extend([(i.tv_show.id, "tmdb_show") for i in person.tv_cast])
elif method == "tmdb_crew":
ids = [(i.movie.id, "tmdb") for i in person.movie_crew]
ids.extend([(i.tv_show.id, "tmdb_show") for i in person.tv_crew])
elif method == "tmdb_director":
ids = [(i.movie.id, "tmdb") for i in person.movie_crew if i.department == "Directing"]
ids.extend([(i.tv_show.id, "tmdb_show") for i in person.tv_crew if i.department == "Directing"])
elif method == "tmdb_writer":
ids = [(i.movie.id, "tmdb") for i in person.movie_crew if i.department == "Writing"]
ids.extend([(i.tv_show.id, "tmdb_show") for i in person.tv_crew if i.department == "Writing"])
elif method == "tmdb_producer":
ids = [(i.movie.id, "tmdb") for i in person.movie_crew if i.department == "Production"]
ids.extend([(i.tv_show.id, "tmdb_show") for i in person.tv_crew if i.department == "Production"])
else:
raise Failed(f"TMDb Error: Method {method} not supported")
if len(ids) > 0:
logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(ids)} Item{'' if len(ids) == 1 else 's'})")
return ids
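Editor's note: this file's move from tmdbv3api to tmdbapis replaces the per-endpoint wrapper objects with a single TMDbAPIs client. A rough sketch of the calls the new code relies on, assuming a valid API key (key and language values below are placeholders):

from tmdbapis import TMDbAPIs

tmdb = TMDbAPIs("TMDB_API_KEY", language="en")          # placeholder key and language

movie = tmdb.movie(299534)                              # single movie lookup
print(movie.title, movie.imdb_id)

results = tmdb.find_by_id(imdb_id="tt4154796")          # external-ID conversion
if results.movie_results:
    print(results.movie_results[0].id)

popular = tmdb.popular_movies()                         # paginated builder source
first_twenty = [(m.id, "tmdb") for m in popular.get_results(20)]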

View file

@@ -73,7 +73,8 @@ class Webhooks:
if playlist: json["playlist"] = str(playlist)
self._request(self.error_webhooks, json)
def collection_hooks(self, webhooks, collection, poster_url=None, background_url=None, created=False, deleted=False, additions=None, removals=None, playlist=False):
def collection_hooks(self, webhooks, collection, poster_url=None, background_url=None, created=False, deleted=False,
additions=None, removals=None, radarr=None, sonarr=None, playlist=False):
if self.library:
thumb = None
if not poster_url and collection.thumb and next((f for f in collection.fields if f.name == "thumb"), None):
@@ -93,4 +94,6 @@
"background_url": background_url,
"additions": additions if additions else [],
"removals": removals if removals else [],
"radarr_adds": radarr if radarr else [],
"sonarr_adds": sonarr if sonarr else [],
})
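Editor's note: webhook consumers should now expect two extra arrays in the collection payload. A sketch showing only the keys visible in this hunk (the real notification carries additional collection details not shown here):

webhook_json_fragment = {
    "background_url": None,   # illustrative values
    "additions": [],
    "removals": [],
    "radarr_adds": [],        # new: populated from the radarr argument added to collection_hooks
    "sonarr_adds": [],        # new: populated from the sonarr argument added to collection_hooks
}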

View file

@@ -34,6 +34,7 @@ parser.add_argument("-lo", "--library-only", "--libraries-only", dest="library_o
parser.add_argument("-lf", "--library-first", "--libraries-first", dest="library_first", help="Run library operations before collections", action="store_true", default=False)
parser.add_argument("-rc", "-cl", "--collection", "--collections", "--run-collection", "--run-collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-rl", "-l", "--library", "--libraries", "--run-library", "--run-libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-rm", "-m", "--metadata", "--metadata-files", "--run-metadata-files", dest="metadata", help="Process only specified Metadata files (comma-separated list)", type=str)
parser.add_argument("-dc", "--delete", "--delete-collections", dest="delete", help="Deletes all Collections in the Plex Library before running", action="store_true", default=False)
parser.add_argument("-nc", "--no-countdown", dest="no_countdown", help="Run without displaying the countdown", action="store_true", default=False)
parser.add_argument("-nm", "--no-missing", dest="no_missing", help="Run without running the missing section", action="store_true", default=False)
@@ -69,6 +70,7 @@ library_only = get_arg("PMM_LIBRARIES_ONLY", args.library_only, arg_bool=True)
library_first = get_arg("PMM_LIBRARIES_FIRST", args.library_first, arg_bool=True)
collections = get_arg("PMM_COLLECTIONS", args.collections)
libraries = get_arg("PMM_LIBRARIES", args.libraries)
metadata_files = get_arg("PMM_METADATA_FILES", args.metadata)
delete = get_arg("PMM_DELETE_COLLECTIONS", args.delete, arg_bool=True)
resume = get_arg("PMM_RESUME", args.resume)
no_countdown = get_arg("PMM_NO_COUNTDOWN", args.no_countdown, arg_bool=True)
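Editor's note: the new metadata_files option follows the same pattern as the other flags, a CLI argument paired with a PMM_ environment variable. A simplified stand-in for the get_arg helper (not its exact implementation) to show the precedence assumed here, environment variable first, then the parsed argument:

import os

def get_arg(env_name, cli_value, arg_bool=False):
    # Simplified: prefer the environment variable when set, otherwise fall back to the CLI value.
    env_value = os.environ.get(env_name)
    if env_value is None:
        return cli_value
    return env_value.lower() in ("1", "t", "true", "y", "yes") if arg_bool else env_value

metadata_files = get_arg("PMM_METADATA_FILES", "Movies, TV Shows")   # e.g. --run-metadata-files "Movies, TV Shows"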
@@ -158,6 +160,7 @@ def start(attrs):
logger.debug(f"--libraries-first (PMM_LIBRARIES_FIRST): {library_first}")
logger.debug(f"--run-collections (PMM_COLLECTIONS): {collections}")
logger.debug(f"--run-libraries (PMM_LIBRARIES): {libraries}")
logger.debug(f"--run-metadata-files (PMM_METADATA_FILES): {metadata_files}")
logger.debug(f"--ignore-schedules (PMM_IGNORE_SCHEDULES): {ignore_schedules}")
logger.debug(f"--delete-collections (PMM_DELETE_COLLECTIONS): {delete}")
logger.debug(f"--resume (PMM_RESUME): {resume}")
@@ -258,8 +261,11 @@ def update_libraries(config):
logger.info("")
library.map_guids()
for metadata in library.metadata_files:
metadata_name = metadata.get_file_name()
if config.requested_metadata_files and metadata_name not in config.requested_metadata_files:
continue
logger.info("")
util.separator(f"Running Metadata File\n{metadata.path}")
util.separator(f"Running {metadata_name} Metadata File\n{metadata.path}")
if not config.test_mode and not config.resume_from and not collection_only:
try:
metadata.update_metadata()
@@ -439,7 +445,7 @@ def library_operations(config, library):
logger.debug(f"TMDb Collections: {library.tmdb_collections}")
logger.debug(f"Genre Collections: {library.genre_collections}")
logger.debug(f"Genre Mapper: {library.genre_mapper}")
logger.debug(f"TMDb Operation: {library.tmdb_library_operation}")
logger.debug(f"TMDb Operation: {library.items_library_operation}")
if library.split_duplicates:
items = library.search(**{"duplicate": True})
@@ -448,7 +454,7 @@ def library_operations(config, library):
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Splitting"))
tmdb_collections = {}
if library.tmdb_library_operation:
if library.items_library_operation:
items = library.get_all()
radarr_adds = []
sonarr_adds = []
@@ -544,8 +550,8 @@ def library_operations(config, library):
else:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}"))
if library.tmdb_collections and tmdb_item and tmdb_item.belongs_to_collection:
tmdb_collections[tmdb_item.belongs_to_collection.id] = tmdb_item.belongs_to_collection.name
if library.tmdb_collections and tmdb_item and tmdb_item.collection:
tmdb_collections[tmdb_item.collection.id] = tmdb_item.collection.name
if library.mass_genre_update:
try:
@@ -600,7 +606,8 @@ def library_operations(config, library):
for genre in item.genres:
if genre.tag in library.genre_mapper:
deletes.append(genre.tag)
adds.append(library.genre_mapper[genre.tag])
if library.genre_mapper[genre.tag]:
adds.append(library.genre_mapper[genre.tag])
library.edit_tags("genre", item, add_tags=adds, remove_tags=deletes)
except Failed:
pass
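Editor's note: with the extra check above, a genre mapped to an empty value is now simply removed instead of adding a blank tag. A small sketch of the add/remove split, using an illustrative mapping:

genre_mapper = {"Sci-Fi & Fantasy": "Science Fiction", "Talk Show": None}   # illustrative mapping
item_genres = ["Sci-Fi & Fantasy", "Talk Show", "Drama"]

deletes, adds = [], []
for genre in item_genres:
    if genre in genre_mapper:
        deletes.append(genre)
        if genre_mapper[genre]:              # falsy mapping -> remove only
            adds.append(genre_mapper[genre])

print(deletes)   # ['Sci-Fi & Fantasy', 'Talk Show']
print(adds)      # ['Science Fiction']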
@@ -652,6 +659,8 @@ def library_operations(config, library):
new_collections[title] = {"template": template}
metadata = MetadataFile(config, library, "Data", {"collections": new_collections, "templates": templates})
if metadata.collections:
library.collections.extend([c for c in metadata.collections])
run_collection(config, library, metadata, metadata.get_collections(None))
if library.radarr_remove_by_tag:
@@ -945,9 +954,11 @@ def run_playlists(config):
else:
server_check = pl_library.PlexServer.machineIdentifier
sync_to_users = config.general["playlist_sync_to_user"]
sync_to_users = config.general["playlist_sync_to_users"]
if "sync_to_users" in playlist_attrs:
sync_to_users = playlist_attrs["sync_to_users"]
elif "sync_to_user" in playlist_attrs:
sync_to_users = playlist_attrs["sync_to_user"]
else:
logger.warning(f"Playlist Error: sync_to_users attribute not found defaulting to playlist_sync_to_user: {sync_to_users}")
@@ -992,7 +1003,16 @@ def run_playlists(config):
logger.debug(f"Builder: {method}: {value}")
logger.info("")
items = []
ids = builder.gather_ids(method, value)
if "plex" in method:
ids = []
for pl_library in pl_libraries:
ids.extend(pl_library.get_rating_keys(method, value))
elif "tautulli" in method:
ids = []
for pl_library in pl_libraries:
ids.extend(pl_library.Tautulli.get_rating_keys(pl_library, value, True))
else:
ids = builder.gather_ids(method, value)
if len(ids) > 0:
total_ids = len(ids)
@@ -1052,7 +1072,7 @@ def run_playlists(config):
try:
input_id = config.Convert.tmdb_to_tvdb(input_id, fail=True)
except Failed as e:
logger.error(e)
logger.warning(e)
continue
if input_id not in builder.ignore_ids:
found = False
@@ -1101,7 +1121,7 @@ def run_playlists(config):
if tvdb_id not in builder.missing_shows:
builder.missing_shows.append(tvdb_id)
except Failed as e:
logger.error(e)
logger.warning(e)
continue
if not isinstance(rating_keys, list):
rating_keys = [rating_keys]
@@ -1205,7 +1225,7 @@ def run_playlists(config):
return status, stats
try:
if run or test or collections or libraries or resume:
if run or test or collections or libraries or metadata_files or resume:
start({
"config_file": config_file,
"test": test,
@@ -1213,6 +1233,7 @@ try:
"ignore_schedules": ignore_schedules,
"collections": collections,
"libraries": libraries,
"metadata_files": metadata_files,
"library_first": library_first,
"resume": resume,
"trace": trace

View file

@@ -1,5 +1,5 @@
PlexAPI==4.8.0
tmdbv3api==1.7.6
PlexAPI==4.9.1
tmdbapis==0.1.8
arrapi==1.3.0
lxml==4.7.1
requests==2.27.1