Mirror of https://github.com/meisnate12/Plex-Meta-Manager, synced 2024-11-10 06:54:21 +00:00

updated how the log looks

commit de4515126a (parent e946a4b6af)

16 changed files with 123 additions and 70 deletions
@@ -49,7 +49,6 @@ class AniDBAPI:
    def get_items(self, method, data, language):
        pretty = util.pretty_names[method] if method in util.pretty_names else method
        logger.debug(f"Data: {data}")
        anidb_ids = []
        if method == "anidb_popular":
            logger.info(f"Processing {pretty}: {data} Anime")

@@ -60,6 +59,7 @@ class AniDBAPI:
        elif method == "anidb_relation": anidb_ids.extend(self._relations(data, language))
        else: raise Failed(f"AniDB Error: Method {method} not supported")
        movie_ids, show_ids = self.config.Convert.anidb_to_ids(anidb_ids)
        logger.debug("")
        logger.debug(f"AniDB IDs Found: {anidb_ids}")
        logger.debug(f"TMDb IDs Found: {movie_ids}")
        logger.debug(f"TVDb IDs Found: {show_ids}")
@@ -218,7 +218,6 @@ class AniListAPI:
        raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}")

    def get_items(self, method, data):
        logger.debug(f"Data: {data}")
        pretty = util.pretty_names[method] if method in util.pretty_names else method
        if method == "anilist_id":
            anilist_id, name = self._validate(data)

@@ -243,6 +242,7 @@ class AniListAPI:
        else:
            raise Failed(f"AniList Error: Method {method} not supported")
        movie_ids, show_ids = self.config.Convert.anilist_to_ids(anilist_ids)
        logger.debug("")
        logger.debug(f"AniList IDs Found: {anilist_ids}")
        logger.debug(f"Shows Found: {show_ids}")
        logger.debug(f"Movies Found: {movie_ids}")
@@ -384,8 +384,6 @@ class CollectionBuilder:
        if skip_collection:
            raise Failed(f"{self.schedule}\n\nCollection {self.name} not scheduled to run")

        logger.info(f"Scanning {self.name} Collection")

        self.run_again = "run_again" in methods
        self.collectionless = "plex_collectionless" in methods

@@ -432,8 +430,9 @@ class CollectionBuilder:
            else:
                raise Failed("Collection Error: smart_url attribute is blank")

        self.smart_filter_details = ""
        if "smart_filter" in methods:
            logger.info("")
            filter_details = "\n"
            smart_filter = self.data[methods["smart_filter"]]
            if smart_filter is None:
                raise Failed(f"Collection Error: smart_filter attribute is blank")

@@ -453,7 +452,7 @@ class CollectionBuilder:
                smart_type = "shows"
            else:
                smart_type = "movies"
            logger.info(f"Smart {smart_type.capitalize()[:-1]} Filter")
            filter_details += f"Smart {smart_type.capitalize()[:-1]} Filter\n"
            self.smart_type_key, smart_sorts = plex.smart_types[smart_type]

            smart_sort = "random"

@@ -463,7 +462,7 @@ class CollectionBuilder:
                if smart_filter[smart_methods["sort_by"]] not in smart_sorts:
                    raise Failed(f"Collection Error: sort_by: {smart_filter[smart_methods['sort_by']]} is invalid")
                smart_sort = smart_filter[smart_methods["sort_by"]]
                logger.info(f"Sort By: {smart_sort}")
                filter_details += f"Sort By: {smart_sort}\n"

            limit = None
            if "limit" in smart_methods:

@@ -472,7 +471,7 @@ class CollectionBuilder:
                if not isinstance(smart_filter[smart_methods["limit"]], int) or smart_filter[smart_methods["limit"]] < 1:
                    raise Failed("Collection Error: limit attribute must be an integer greater then 0")
                limit = smart_filter[smart_methods["limit"]]
                logger.info(f"Limit: {limit}")
                filter_details += f"Limit: {limit}\n"

            validate = True
            if "validate" in smart_methods:

@@ -481,7 +480,7 @@ class CollectionBuilder:
                if not isinstance(smart_filter[smart_methods["validate"]], bool):
                    raise Failed("Collection Error: validate attribute must be either true or false")
                validate = smart_filter[smart_methods["validate"]]
                logger.info(f"Validate: {validate}")
                filter_details += f"Validate: {validate}\n"

            def _filter(filter_dict, fail, is_all=True, level=1):
                output = ""

@@ -590,7 +589,7 @@ class CollectionBuilder:
            if not isinstance(smart_filter[smart_methods[base]], dict):
                raise Failed(f"Collection Error: {base} must be a dictionary: {smart_filter[smart_methods[base]]}")
            built_filter, filter_text = _filter(smart_filter[smart_methods[base]], validate, is_all=base_all)
            util.print_multiline(f"Filter:{filter_text}")
            self.smart_filter_details = f"{filter_details}Filter:{filter_text}"
            final_filter = built_filter[:-1] if base_all else f"push=1&{built_filter}pop=1"
            self.smart_url = f"?type={self.smart_type_key}&{f'limit={limit}&' if limit else ''}sort={smart_sorts[smart_sort]}&{final_filter}"
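As a rough illustration of how the smart_url string above gets assembled, here is a minimal, self-contained sketch; the type key, sort key, limit and filter fragment are made-up placeholder values, not real Plex smart-filter keys:

    # Placeholder values standing in for self.smart_type_key, smart_sorts[smart_sort],
    # limit and the fragment produced by _filter(); none of them come from Plex.
    smart_type_key = 1
    limit = 50
    sort_key = "titleSort.asc"
    built_filter = "genre=90&"
    base_all = True

    final_filter = built_filter[:-1] if base_all else f"push=1&{built_filter}pop=1"
    smart_url = f"?type={smart_type_key}&{f'limit={limit}&' if limit else ''}sort={sort_key}&{final_filter}"
    print(smart_url)  # ?type=1&limit=50&sort=titleSort.asc&genre=90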
@@ -1277,6 +1276,8 @@ class CollectionBuilder:
        else:
            self.sync = False
            self.run_again = False
        logger.info("")
        logger.info("Validation Successful")

    def collect_rating_keys(self, movie_map, show_map):
        def add_rating_keys(keys):

@@ -1442,10 +1443,11 @@ class CollectionBuilder:
            elif self.details["show_filtered"] is True:
                logger.info(f"{name} Collection | X | {current.title}")
        media_type = f"{'Movie' if self.library.is_movie else 'Show'}{'s' if total > 1 else ''}"
        logger.info(util.adjust_space(length, f"{total} {media_type} Processed"))
        util.print_end(length)
        logger.info("")
        logger.info(f"{total} {media_type} Processed")

    def run_missing(self):
        logger.info("")
        arr_filters = []
        for filter_method, filter_data in self.filters:
            if (filter_method.startswith("original_language") and self.library.is_movie) or filter_method.startswith("tmdb_vote_count"):

@@ -1472,6 +1474,7 @@ class CollectionBuilder:
                    logger.info(f"{self.name} Collection | ? | {movie.title} (TMDb: {missing_id})")
                elif self.details["show_filtered"] is True:
                    logger.info(f"{self.name} Collection | X | {movie.title} (TMDb: {missing_id})")
            logger.info("")
            logger.info(f"{len(missing_movies_with_names)} Movie{'s' if len(missing_movies_with_names) > 1 else ''} Missing")
            if self.details["save_missing"] is True:
                self.library.add_missing(self.name, missing_movies_with_names, True)

@@ -1506,6 +1509,7 @@ class CollectionBuilder:
                    logger.info(f"{self.name} Collection | ? | {title} (TVDB: {missing_id})")
                elif self.details["show_filtered"] is True:
                    logger.info(f"{self.name} Collection | X | {title} (TVDb: {missing_id})")
            logger.info("")
            logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing")
            if self.details["save_missing"] is True:
                self.library.add_missing(self.name, missing_shows_with_names, False)

@@ -1520,16 +1524,21 @@ class CollectionBuilder:
            self.run_again_shows.extend(missing_tvdb_ids)

    def sync_collection(self):
        logger.info("")
        count_removed = 0
        for ratingKey, item in self.plex_map.items():
            if item is not None:
                if count_removed == 0:
                    logger.info("")
                    util.separator(f"Removed from {self.name} Collection", space=False, border=False)
                    logger.info("")
                logger.info(f"{self.name} Collection | - | {item.title}")
                if self.smart_label_collection:
                    self.library.query_data(item.removeLabel, self.name)
                else:
                    self.library.query_data(item.removeCollection, self.name)
                count_removed += 1
        if count_removed > 0:
            logger.info("")
            logger.info(f"{count_removed} {'Movie' if self.library.is_movie else 'Show'}{'s' if count_removed == 1 else ''} Removed")

    def update_details(self):

@@ -1644,9 +1653,6 @@ class CollectionBuilder:
            except BadRequest:
                logger.error(f"Detail: {image_method} failed to update {message}")

        if len(self.posters) > 0:
            logger.info("")

        if len(self.posters) > 1:
            logger.info(f"{len(self.posters)} posters found:")
            for p in self.posters:

@@ -1671,9 +1677,6 @@ class CollectionBuilder:
        elif "tmdb_show_details" in self.posters: set_image("tmdb_show_details", self.posters)
        else: logger.info("No poster to update")

        if len(self.backgrounds) > 0:
            logger.info("")

        if len(self.backgrounds) > 1:
            logger.info(f"{len(self.backgrounds)} backgrounds found:")
            for b in self.backgrounds:
@@ -318,14 +318,17 @@ class Config:
                continue
            util.separator()
            params = {}
            logger.info("")
            params["mapping_name"] = str(library_name)
            if lib and "library_name" in lib and lib["library_name"]:
                params["name"] = str(lib["library_name"])
                logger.info(f"Connecting to {params['name']} ({library_name}) Library...")
                display_name = f"{params['name']} ({params['mapping_name']})"
            else:
                params["name"] = str(library_name)
                logger.info(f"Connecting to {params['name']} Library...")
                params["mapping_name"] = str(library_name)
                params["name"] = params["mapping_name"]
                display_name = params["mapping_name"]

            util.separator(f"{display_name} Configuration")
            logger.info("")
            logger.info(f"Connecting to {display_name} Library...")

            params["asset_directory"] = check_for_attribute(lib, "asset_directory", parent="settings", var_type="list_path", default=self.general["asset_directory"], default_is_none=True, save=False)
            if params["asset_directory"] is None:

@@ -436,15 +439,19 @@ class Config:
                params["plex"]["empty_trash"] = check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False)
                params["plex"]["optimize"] = check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
                library = PlexAPI(params, self.TMDb, self.TVDb)
                logger.info(f"{params['name']} Library Connection Successful")
                logger.info("")
                logger.info(f"{display_name} Library Connection Successful")
            except Failed as e:
                util.print_multiline(e, error=True)
                logger.info(f"{params['name']} Library Connection Failed")
                logger.info(f"{display_name} Library Connection Failed")
                continue

            if self.general["radarr"]["url"] or (lib and "radarr" in lib):
                logger.info("")
                logger.info(f"Connecting to {params['name']} library's Radarr...")
                util.separator("Radarr Configuration", space=False, border=False)
                logger.info("")
                logger.info(f"Connecting to {display_name} library's Radarr...")
                logger.info("")
                radarr_params = {}
                try:
                    radarr_params["url"] = check_for_attribute(lib, "url", parent="radarr", default=self.general["radarr"]["url"], req_default=True, save=False)

@@ -460,11 +467,15 @@ class Config:
                    library.Radarr = RadarrAPI(radarr_params)
                except Failed as e:
                    util.print_multiline(e, error=True)
                logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
                logger.info("")
                logger.info(f"{display_name} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")

            if self.general["sonarr"]["url"] or (lib and "sonarr" in lib):
                logger.info("")
                logger.info(f"Connecting to {params['name']} library's Sonarr...")
                util.separator("Sonarr Configuration", space=False, border=False)
                logger.info("")
                logger.info(f"Connecting to {display_name} library's Sonarr...")
                logger.info("")
                sonarr_params = {}
                try:
                    sonarr_params["url"] = check_for_attribute(lib, "url", parent="sonarr", default=self.general["sonarr"]["url"], req_default=True, save=False)

@@ -486,11 +497,15 @@ class Config:
                    library.Sonarr = SonarrAPI(sonarr_params, library.Plex.language)
                except Failed as e:
                    util.print_multiline(e, error=True)
                logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
                logger.info("")
                logger.info(f"{display_name} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")

            if self.general["tautulli"]["url"] or (lib and "tautulli" in lib):
                logger.info("")
                logger.info(f"Connecting to {params['name']} library's Tautulli...")
                util.separator("Tautulli Configuration", space=False, border=False)
                logger.info("")
                logger.info(f"Connecting to {display_name} library's Tautulli...")
                logger.info("")
                tautulli_params = {}
                try:
                    tautulli_params["url"] = check_for_attribute(lib, "url", parent="tautulli", default=self.general["tautulli"]["url"], req_default=True, save=False)

@@ -498,7 +513,8 @@ class Config:
                    library.Tautulli = TautulliAPI(tautulli_params)
                except Failed as e:
                    util.print_multiline(e, error=True)
                logger.info(f"{params['name']} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
                logger.info("")
                logger.info(f"{display_name} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")

            logger.info("")
            self.libraries.append(library)
@@ -93,14 +93,13 @@ class IMDbAPI:
    def get_items(self, method, data, language, is_movie):
        pretty = util.pretty_names[method] if method in util.pretty_names else method
        logger.debug(f"Data: {data}")
        show_ids = []
        movie_ids = []
        def run_convert(imdb_id):
            tvdb_id = self.config.Convert.imdb_to_tvdb(imdb_id) if not is_movie else None
            tmdb_id = self.config.Convert.imdb_to_tmdb(imdb_id) if tvdb_id is None else None
            if not tmdb_id and not tvdb_id:
                logger.error(f"Convert Error: No TMDb ID or TVDb ID found for IMDb: {imdb_id}")
                logger.error(f"Convert Error: No {'' if is_movie else 'TVDb ID or '}TMDb ID found for IMDb: {imdb_id}")
            if tmdb_id: movie_ids.append(tmdb_id)
            if tvdb_id: show_ids.append(tvdb_id)

@@ -119,6 +118,7 @@ class IMDbAPI:
            logger.info(util.adjust_space(length, f"Processed {total_ids} IMDb IDs"))
        else:
            raise Failed(f"IMDb Error: Method {method} not supported")
        logger.debug("")
        logger.debug(f"TMDb IDs Found: {movie_ids}")
        logger.debug(f"TVDb IDs Found: {show_ids}")
        return movie_ids, show_ids
@@ -69,5 +69,6 @@ class LetterboxdAPI:
            logger.info(util.adjust_space(length, f"Processed {total_items} TMDb IDs"))
        else:
            logger.error(f"Letterboxd Error: No List Items found in {data}")
        logger.debug("")
        logger.debug(f"TMDb IDs Found: {movie_ids}")
        return movie_ids, []
@@ -194,7 +194,6 @@ class MyAnimeListAPI:
        return self._parse_request(url)

    def get_items(self, method, data):
        logger.debug(f"Data: {data}")
        pretty = util.pretty_names[method] if method in util.pretty_names else method
        if method == "mal_id":
            mal_ids = [data]

@@ -214,6 +213,7 @@ class MyAnimeListAPI:
        else:
            raise Failed(f"MyAnimeList Error: Method {method} not supported")
        movie_ids, show_ids = self.config.Convert.myanimelist_to_ids(mal_ids)
        logger.debug("")
        logger.debug(f"MyAnimeList IDs Found: {mal_ids}")
        logger.debug(f"Shows Found: {show_ids}")
        logger.debug(f"Movies Found: {movie_ids}")
@@ -66,11 +66,11 @@ class Metadata:
        return self.collections

    def update_metadata(self, TMDb, test):
        logger.info("")
        util.separator(f"Running Metadata")
        logger.info("")
        if not self.metadata:
            raise Failed("No metadata to edit")
            return None
        logger.info("")
        util.separator("Running Metadata")
        logger.info("")
        for mapping_name, meta in self.metadata.items():
            methods = {mm.lower(): mm for mm in meta}
            if test and ("test" not in methods or meta[methods["test"]] is not True):
@@ -530,7 +530,6 @@ class PlexAPI:
        return valid_collections

    def get_items(self, method, data):
        logger.debug(f"Data: {data}")
        pretty = util.pretty_names[method] if method in util.pretty_names else method
        media_type = "Movie" if self.is_movie else "Show"
        items = []
@@ -66,6 +66,8 @@ class RadarrAPI:
        raise Failed(f"Sonarr Error: TMDb ID: {tmdb_id} not found")

    def add_tmdb(self, tmdb_ids, **options):
        logger.info("")
        util.separator(f"Adding to Radarr", space=False, border=False)
        logger.info("")
        logger.debug(f"TMDb IDs: {tmdb_ids}")
        tag_nums = []
@@ -86,6 +86,8 @@ class SonarrAPI:
        raise Failed(f"Sonarr Error: TVDb ID: {tvdb_id} not found")

    def add_tvdb(self, tvdb_ids, **options):
        logger.info("")
        util.separator(f"Adding to Sonarr", space=False, border=False)
        logger.info("")
        logger.debug(f"TVDb IDs: {tvdb_ids}")
        tag_nums = []
@@ -292,7 +292,6 @@ class TMDbAPI:
        return tmdb_id

    def get_items(self, method, data, is_movie):
        logger.debug(f"Data: {data}")
        pretty = util.pretty_names[method] if method in util.pretty_names else method
        media_type = "Movie" if is_movie else "Show"
        movie_ids = []

@@ -362,6 +361,7 @@ class TMDbAPI:
            logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})")
        if not is_movie and len(show_ids) > 0:
            logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})")
        logger.debug("")
        logger.debug(f"TMDb IDs Found: {movie_ids}")
        logger.debug(f"TVDb IDs Found: {show_ids}")
        return movie_ids, show_ids
@@ -157,7 +157,6 @@ class TraktAPI:
        return trakt_values

    def get_items(self, method, data, is_movie):
        logger.debug(f"Data: {data}")
        pretty = self.aliases[method] if method in self.aliases else method
        media_type = "Movie" if is_movie else "Show"
        if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:

@@ -181,6 +180,7 @@ class TraktAPI:
            elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids:
                show_ids.append(int(trakt_item.show.pk[1]))
        logger.debug(f"Trakt {media_type} Found: {trakt_items}")
        logger.debug("")
        logger.debug(f"TMDb IDs Found: {movie_ids}")
        logger.debug(f"TVDb IDs Found: {show_ids}")
        return movie_ids, show_ids
@@ -163,6 +163,7 @@ class TVDbAPI:
            show_ids.extend(tvdb_ids)
        else:
            raise Failed(f"TVDb Error: Method {method} not supported")
        logger.debug("")
        logger.debug(f"TMDb IDs Found: {movie_ids}")
        logger.debug(f"TVDb IDs Found: {show_ids}")
        return movie_ids, show_ids
@@ -352,28 +352,35 @@ def regex_first_int(data, id_type, default=None):
    else:
        raise Failed(f"Regex Error: Failed to parse {id_type} from {data}")

def centered(text, do_print=True):
def centered(text, sep=" "):
    if len(text) > screen_width - 2:
        raise Failed("text must be shorter then screen_width")
    space = screen_width - len(text) - 2
    text = f" {text} "
    if space % 2 == 1:
        text += " "
        text += sep
        space -= 1
    side = int(space / 2)
    final_text = f"{' ' * side}{text}{' ' * side}"
    if do_print:
        logger.info(final_text)
    side = int(space / 2) - 1
    final_text = f"{sep * side}{text}{sep * side}"
    return final_text

def separator(text=None):
def separator(text=None, space=True, border=True, debug=False):
    sep = " " if space else separating_character
    for handler in logger.handlers:
        apply_formatter(handler, border=False)
    logger.info(f"|{separating_character * screen_width}|")
    border_text = f"|{separating_character * screen_width}|"
    if border and debug:
        logger.debug(border_text)
    elif border:
        logger.info(border_text)
    if text:
        text_list = text.split("\n")
        for t in text_list:
            logger.info(f"| {centered(t, do_print=False)} |")
            logger.info(f"|{separating_character * screen_width}|")
            logger.info(f"|{sep}{centered(t, sep=sep)}{sep}|")
            if border and debug:
                logger.debug(border_text)
            elif border:
                logger.info(border_text)
    for handler in logger.handlers:
        apply_formatter(handler)
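To show what the reworked helpers log, here is a small self-contained sketch of the new centered() padding logic and the text row that separator() emits when called with space=False and border=False; the screen_width value and the "=" separating character are assumptions for illustration (the real constants live in util.py and are not part of this diff), and the length guard is left out:

    screen_width = 40  # assumed value, not the real constant

    def centered(text, sep=" "):
        # trimmed copy of the new helper: pad the text with sep on both sides
        space = screen_width - len(text) - 2
        text = f" {text} "
        if space % 2 == 1:
            text += sep
            space -= 1
        side = int(space / 2) - 1
        return f"{sep * side}{text}{sep * side}"

    # separator("Radarr Configuration", space=False, border=False) would log only
    # the text row below (no |====...====| border rows), sized to screen_width + 2:
    print(f"|={centered('Radarr Configuration', sep='=')}=|")
    # |========= Radarr Configuration =========|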
@@ -91,15 +91,14 @@ def start(config_path, is_test, daily, requested_collections, requested_librarie
    file_handler.doRollover()
    logger.addHandler(file_handler)
    util.separator()
    util.centered(" ")
    util.centered(" ____ _ __ __ _ __ __ ")
    util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ ")
    util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|")
    util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | ")
    util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| ")
    util.centered(" |___/ ")
    util.centered(" Version: 1.9.2 ")
    util.separator()
    logger.info(util.centered(" "))
    logger.info(util.centered(" ____ _ __ __ _ __ __ "))
    logger.info(util.centered("| _ \\| | _____ __ | \\/ | ___| |_ __ _ | \\/ | __ _ _ __ __ _ __ _ ___ _ __ "))
    logger.info(util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|"))
    logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
    logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
    logger.info(util.centered(" |___/ "))
    logger.info(util.centered(" Version: 1.9.2 "))
    if daily: start_type = "Daily "
    elif is_test: start_type = "Test "
    elif requested_collections: start_type = "Collections "

@@ -132,7 +131,7 @@ def update_libraries(config, is_test, requested_collections, resume_from):
        logger.info("")
        util.separator(f"{library.name} Library")
        logger.info("")
        util.separator(f"Mapping {library.name} Library")
        util.separator(f"Mapping {library.name} Library", space=False, border=False)
        logger.info("")
        movie_map, show_map = map_guids(config, library)
        if not is_test and not resume_from and not collection_only and library.mass_update:

@@ -145,18 +144,22 @@ def update_libraries(config, is_test, requested_collections, resume_from):
                    metadata.update_metadata(config.TMDb, is_test)
                except Failed as e:
                    logger.error(e)
            logger.info("")
            util.separator(f"{'Test ' if is_test else ''}Collections")
            collections_to_run = metadata.get_collections(requested_collections)
            if resume_from and resume_from not in collections_to_run:
                logger.info("")
                logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}")
                continue
            if collections_to_run and not library_only:
                logger.info("")
                util.separator(f"{'Test ' if is_test else ''}Collections")
                logger.removeHandler(library_handler)
                resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map)
                logger.addHandler(library_handler)

        if not is_test and not requested_collections:
        if not is_test and not requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
            logger.info("")
            util.separator(f"Other {library.name} Library Operations")
            logger.info("")
            unmanaged_collections = []
            for col in library.get_all_collections():
                if col.title not in library.collections:

@@ -164,15 +167,16 @@ def update_libraries(config, is_test, requested_collections, resume_from):

        if library.show_unmanaged and not library_only:
            logger.info("")
            util.separator(f"Unmanaged Collections in {library.name} Library")
            util.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False)
            logger.info("")
            for col in unmanaged_collections:
                logger.info(col.title)
            logger.info("")
            logger.info(f"{len(unmanaged_collections)} Unmanaged Collections")

        if library.assets_for_all and not collection_only:
            logger.info("")
            util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library")
            util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library", space=False, border=False)
            logger.info("")
            for col in unmanaged_collections:
                library.update_item_from_assets(col, collection_mode=True)

@@ -236,6 +240,7 @@ def map_guids(config, library):
    show_map = {}
    length = 0
    logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
    logger.info("")
    items = library.Plex.all()
    for i, item in enumerate(items, 1):
        length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")

@@ -251,6 +256,7 @@ def map_guids(config, library):
            for m in main_id:
                if m in show_map: show_map[m].append(item.ratingKey)
                else: show_map[m] = [item.ratingKey]
    logger.info("")
    logger.info(util.adjust_space(length, f"Processed {len(items)} {'Movies' if library.is_movie else 'Shows'}"))
    return movie_map, show_map
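For reference, the two maps returned by map_guids keep a list of Plex rating keys per external ID, so one TMDb or TVDb ID can point at several library items; the IDs and rating keys below are invented purely to show the shape:

    # Illustrative shapes only; these IDs and rating keys are not from a real library.
    movie_map = {603: [12001], 24428: [12002, 15330]}  # tmdb_id -> [ratingKey, ...]
    show_map = {81189: [22010]}                        # tvdb_id -> [ratingKey, ...]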
@@ -419,11 +425,19 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
        logger.info(output_str)
        logger.info("")

        util.separator(f"Validating {mapping_name} Attributes", space=False, border=False)

        builder = CollectionBuilder(config, library, metadata, mapping_name, collection_attrs)
        logger.info("")

        util.separator(f"Building {mapping_name} Collection", space=False, border=False)

        if len(builder.schedule) > 0:
            util.print_multiline(builder.schedule, info=True)

        if len(builder.smart_filter_details) > 0:
            util.print_multiline(builder.smart_filter_details, info=True)

        if not builder.smart_url:
            logger.info("")
            logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")

@@ -434,16 +448,24 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
            logger.info(f"Collection Filter {filter_key}: {filter_value}")

        builder.collect_rating_keys(movie_map, show_map)
        logger.info("")

        if len(builder.rating_keys) > 0 and builder.build_collection:
            logger.info("")
            util.separator(f"Adding to {mapping_name} Collection", space=False, border=False)
            logger.info("")
            builder.add_to_collection(movie_map)
        if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0:
            logger.info("")
            util.separator(f"Missing from Library", space=False, border=False)
            logger.info("")
            builder.run_missing()
        if builder.sync and len(builder.rating_keys) > 0 and builder.build_collection:
            builder.sync_collection()
        logger.info("")

        if builder.build_collection:
            logger.info("")
            util.separator(f"Updating Details of {mapping_name} Collection", space=False, border=False)
            logger.info("")
            builder.update_details()

        if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):