Merge pull request #50 from meisnate12/develop

v1.2.0
meisnate12 2021-02-21 16:18:56 -05:00 committed by GitHub
commit 7902cffec5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
15 changed files with 1763 additions and 2560 deletions


@ -6,10 +6,9 @@ RUN \
apt-get update && \
apt-get upgrade -y --no-install-recommends && \
apt-get install -y tzdata --no-install-recommends && \
apt-get install -y gcc g++ libxml2-dev libxslt-dev libz-dev && \
echo "**** install python packages ****" && \
pip3 install --no-cache-dir --upgrade --requirement /requirements.txt && \
echo "**** install Plex-Auto-Collections ****" && \
chmod +x /plex_meta_manager.py && \
echo "**** cleanup ****" && \
apt-get autoremove -y && \
apt-get clean && \


@ -1,5 +1,5 @@
# Plex Meta Manager
#### Version 1.1.0
#### Version 1.2.0
The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but it has been rewritten from the ground up to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be run continuously, using YAML configuration files to update the metadata of the movies, shows, and collections in your libraries on a schedule and to automatically build collections based on the various methods detailed in the wiki. Examples of collections the script can automatically build and update daily include Plex-based searches (such as actor, genre, or studio collections) and collections based on lists from TMDb, IMDb, Trakt, TVDb, AniDB, MyAnimeList, and various other services.
@ -18,5 +18,5 @@ The script is designed to work with most Metadata agents including the new Plex
* If you're getting an Error or have an Enhancement request, post in the [Issues](https://github.com/meisnate12/Plex-Meta-Manager/issues)
* If you have a configuration question, visit the [Discussions](https://github.com/meisnate12/Plex-Meta-Manager/discussions)
* To see user-submitted Metadata configuration files, or to add your own, go to the [Plex Meta Manager Configs](https://github.com/meisnate12/Plex-Meta-Manager-Configs)
* Pull Requests are welcome
* Pull Requests are welcome, but please submit them to the develop branch
* [Buy Me a Pizza](https://www.buymeacoffee.com/meisnate12)
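As the description above notes, the script is driven by YAML configuration files. A minimal, illustrative sketch of inspecting such a config with the same ruamel.yaml loader used in modules/plex.py further down (the config path here is an assumption):

from ruamel import yaml

# Illustrative only: load a config like the template further down and list its
# libraries, using the same load_yaml_guess_indent call the script itself uses.
data, ind, bsi = yaml.util.load_yaml_guess_indent(open("config/config.yml", encoding="utf-8"))
for library_name, library in (data.get("libraries") or {}).items():
    print(library_name, library.get("library_type"))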

File diff suppressed because it is too large


@ -1,3 +1,5 @@
## This file is a template; remove the .template extension to use the file
libraries:
Movies:
library_type: movie
@ -5,16 +7,24 @@ libraries:
library_type: show
Anime:
library_type: show
cache:
settings: # Can be individually specified per library as well
cache: true
cache_expiration: 60
plex: # Can be individually specified per library as well
url: http://192.168.1.12:32400
token: ####################
sync_mode: append
asset_directory: config/assets
sync_mode: append
show_unmanaged: true
show_filtered: false
show_missing: true
save_missing: true
plex: # Can be individually specified per library as well
url: http://192.168.1.12:32400
token: ####################
tmdb:
apikey: ################################
language: en
tautulli: # Can be individually specified per library as well
url: http://192.168.1.12:8181
apikey: ################################
radarr: # Can be individually specified per library as well
url: http://192.168.1.12:7878
token: ################################
@ -31,12 +41,6 @@ sonarr: # Can be individually specified
root_folder_path: "S:/TV Shows"
add: false
search: false
tautulli: # Can be individually specified per library as well
url: http://192.168.1.12:8181
apikey: ################################
tmdb:
apikey: ################################
language: en
trakt:
client_id: ################################################################
client_secret: ################################################################
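Several blocks in this template are marked "Can be individually specified per library as well". A minimal sketch of how that kind of global-versus-per-library fallback can be resolved (the helper below is illustrative, not the script's actual loader):

# Hypothetical helper: prefer a per-library value, fall back to the global
# settings block, then to a hard default.
def resolve_setting(global_settings, library_config, key, default):
    if isinstance(library_config, dict) and key in library_config:
        return library_config[key]
    if isinstance(global_settings, dict) and key in global_settings:
        return global_settings[key]
    return default

global_settings = {"cache": True, "cache_expiration": 60, "sync_mode": "append"}
movies_library = {"library_type": "movie", "sync_mode": "sync"}
print(resolve_setting(global_settings, movies_library, "sync_mode", "append"))    # sync
print(resolve_setting(global_settings, movies_library, "cache_expiration", 60))   # 60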

modules/builder.py Normal file (754 lines added)

@ -0,0 +1,754 @@
import glob, logging, os, re
from datetime import datetime, timedelta
from modules import util
from modules.util import Failed
logger = logging.getLogger("Plex Meta Manager")
class CollectionBuilder:
def __init__(self, config, library, name, data):
self.config = config
self.library = library
self.name = name
self.data = data
self.details = {
"arr_tag": None,
"show_filtered": library.show_filtered,
"show_missing": library.show_missing,
"save_missing": library.save_missing
}
self.methods = []
self.filters = []
self.posters = []
self.backgrounds = []
self.schedule = None
if "template" in data:
if not self.library.templates:
raise Failed("Collection Error: No templates found")
elif not data["template"]:
raise Failed("Collection Error: template attribute is blank")
else:
for data_template in util.get_list(data["template"], split=False):
if not isinstance(data_template, dict):
raise Failed("Collection Error: template attribute is not a dictionary")
elif "name" not in data_template:
raise Failed("Collection Error: template sub-attribute name is required")
elif not data_template["name"]:
raise Failed("Collection Error: template sub-attribute name is blank")
elif data_template["name"] not in self.library.templates:
raise Failed("Collection Error: template {} not found".format(data_template["name"]))
elif not isinstance(self.library.templates[data_template["name"]], dict):
raise Failed("Collection Error: template {} is not a dictionary".format(data_template["name"]))
else:
for tm in data_template:
if not data_template[tm]:
raise Failed("Collection Error: template sub-attribute {} is blank".format(data_template[tm]))
template_name = data_template["name"]
template = self.library.templates[template_name]
default = {}
if "default" in template:
if template["default"]:
if isinstance(template["default"], dict):
for dv in template["default"]:
if template["default"][dv]:
default[dv] = template["default"][dv]
else:
raise Failed("Collection Error: template default sub-attribute {} is blank".format(dv))
else:
raise Failed("Collection Error: template sub-attribute default is not a dictionary")
else:
raise Failed("Collection Error: template sub-attribute default is blank")
for m in template:
if m not in self.data and m != "default":
if template[m]:
attr = None
def replace_txt(txt):
txt = str(txt)
for tm in data_template:
if tm != "name" and "<<{}>>".format(tm) in txt:
txt = txt.replace("<<{}>>".format(tm), str(data_template[tm]))
if "<<collection_name>>" in txt:
txt = txt.replace("<<collection_name>>", str(self.name))
for dm in default:
if "<<{}>>".format(dm) in txt:
txt = txt.replace("<<{}>>".format(dm), str(default[dm]))
if txt in ["true", "True"]: return True
elif txt in ["false", "False"]: return False
else:
try: return int(txt)
except ValueError: return txt
if isinstance(template[m], dict):
attr = {}
for sm in template[m]:
if isinstance(template[m][sm], list):
temp_list = []
for li in template[m][sm]:
temp_list.append(replace_txt(li))
attr[sm] = temp_list
else:
attr[sm] = replace_txt(template[m][sm])
elif isinstance(template[m], list):
attr = []
for li in template[m]:
if isinstance(li, dict):
temp_dict = {}
for sm in li:
temp_dict[sm] = replace_txt(li[sm])
attr.append(temp_dict)
else:
attr.append(replace_txt(li))
else:
attr = replace_txt(template[m])
self.data[m] = attr
else:
raise Failed("Collection Error: template attribute {} is blank".format(m))
skip_collection = True
if "schedule" not in data:
skip_collection = False
elif not data["schedule"]:
logger.error("Collection Error: schedule attribute is blank. Running daily")
skip_collection = False
else:
schedule_list = util.get_list(data["schedule"])
current_time = datetime.now()
next_month = current_time.replace(day=28) + timedelta(days=4)
last_day = next_month - timedelta(days=next_month.day)
for schedule in schedule_list:
run_time = str(schedule).lower()
if run_time.startswith("day") or run_time.startswith("daily"):
skip_collection = False
if run_time.startswith("week") or run_time.startswith("month") or run_time.startswith("year"):
match = re.search("\\(([^)]+)\\)", run_time)
if match:
param = match.group(1)
if run_time.startswith("week"):
if param.lower() in util.days_alias:
weekday = util.days_alias[param.lower()]
self.schedule += "\nScheduled weekly on {}".format(util.pretty_days[weekday])
if weekday == current_time.weekday():
skip_collection = False
else:
logger.error("Collection Error: weekly schedule attribute {} invalid must be a day of the weeek i.e. weekly(Monday)".format(schedule))
elif run_time.startswith("month"):
try:
if 1 <= int(param) <= 31:
self.schedule += "\nScheduled monthly on the {}".format(util.make_ordinal(param))
if current_time.day == int(param) or (current_time.day == last_day.day and int(param) > last_day.day):
skip_collection = False
else:
logger.error("Collection Error: monthly schedule attribute {} invalid must be between 1 and 31".format(schedule))
except ValueError:
logger.error("Collection Error: monthly schedule attribute {} invalid must be an integer".format(schedule))
elif run_time.startswith("year"):
match = re.match("^(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])$", param)
if match:
month = int(match.group(1))
day = int(match.group(2))
self.schedule += "\nScheduled yearly on {} {}".format(util.pretty_months[month], util.make_ordinal(day))
if current_time.month == month and (current_time.day == day or (current_time.day == last_day.day and day > last_day.day)):
skip_collection = False
else:
logger.error("Collection Error: yearly schedule attribute {} invalid must be in the MM/DD format i.e. yearly(11/22)".format(schedule))
else:
logger.error("Collection Error: failed to parse schedule: {}".format(schedule))
else:
logger.error("Collection Error: schedule attribute {} invalid".format(schedule))
if self.schedule is None:
skip_collection = False
if skip_collection:
raise Failed("Skipping Collection {}".format(c))
logger.info("Scanning {} Collection".format(self.name))
self.collectionless = "plex_collectionless" in data
self.sync = self.library.sync_mode == "sync"
if "sync_mode" in data:
if not data["sync_mode"]: logger.warning("Collection Warning: sync_mode attribute is blank using general: {}".format(self.library.sync_mode))
elif data["sync_mode"] not in ["append", "sync"]: logger.warning("Collection Warning: {} sync_mode invalid using general: {}".format(self.library.sync_mode, data["sync_mode"]))
else: self.sync = data["sync_mode"] == "sync"
if "tmdb_person" in data:
if data["tmdb_person"]:
valid_names = []
for tmdb_id in util.get_int_list(data["tmdb_person"], "TMDb Person ID"):
person = config.TMDb.get_person(tmdb_id)
valid_names.append(person.name)
if "summary" not in self.details and hasattr(person, "biography") and person.biography:
self.details["summary"] = person.biography
if "poster" not in self.details and hasattr(person, "profile_path") and person.profile_path:
self.details["poster"] = ("url", "{}{}".format(config.TMDb.image_url, person.profile_path), "tmdb_person")
if len(valid_names) > 0: self.details["tmdb_person"] = valid_names
else: raise Failed("Collection Error: No valid TMDb Person IDs in {}".format(data["tmdb_person"]))
else:
raise Failed("Collection Error: tmdb_person attribute is blank")
for m in data:
if "tmdb" in m and not config.TMDb: raise Failed("Collection Error: {} requires TMDb to be configured".format(m))
elif "trakt" in m and not config.Trakt: raise Failed("Collection Error: {} requires Trakt todo be configured".format(m))
elif "imdb" in m and not config.IMDb: raise Failed("Collection Error: {} requires TMDb or Trakt to be configured".format(m))
elif "tautulli" in m and not self.library.Tautulli: raise Failed("Collection Error: {} requires Tautulli to be configured".format(m))
elif "mal" in m and not config.MyAnimeList: raise Failed("Collection Error: {} requires MyAnimeList to be configured".format(m))
elif data[m] is not None:
logger.debug("")
logger.debug("Method: {}".format(m))
logger.debug("Value: {}".format(data[m]))
if m in util.method_alias:
method_name = util.method_alias[m]
logger.warning("Collection Warning: {} attribute will run as {}".format(m, method_name))
else:
method_name = m
if method_name in util.show_only_lists and self.library.is_movie:
raise Failed("Collection Error: {} attribute only works for show libraries".format(method_name))
elif method_name in util.movie_only_lists and self.library.is_show:
raise Failed("Collection Error: {} attribute only works for movie libraries".format(method_name))
elif method_name in util.movie_only_searches and self.library.is_show:
raise Failed("Collection Error: {} plex search only works for movie libraries".format(method_name))
elif method_name not in util.collectionless_lists and self.collectionless:
raise Failed("Collection Error: {} attribute does not work for Collectionless collection".format(method_name))
elif method_name == "tmdb_summary":
self.details["summary"] = config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).overview
elif method_name == "tmdb_description":
self.details["summary"] = config.TMDb.get_list(util.regex_first_int(data[m], "TMDb List ID")).description
elif method_name == "tmdb_biography":
self.details["summary"] = config.TMDb.get_person(util.regex_first_int(data[m], "TMDb Person ID")).biography
elif method_name == "collection_mode":
if data[m] in ["default", "hide", "hide_items", "show_items", "hideItems", "showItems"]:
if data[m] == "hide_items": self.details[method_name] = "hideItems"
elif data[m] == "show_items": self.details[method_name] = "showItems"
else: self.details[method_name] = data[m]
else:
raise Failed("Collection Error: {} collection_mode Invalid\n| \tdefault (Library default)\n| \thide (Hide Collection)\n| \thide_items (Hide Items in this Collection)\n| \tshow_items (Show this Collection and its Items)".format(data[m]))
elif method_name == "collection_order":
if data[m] in ["release", "alpha"]:
self.details[method_name] = data[m]
else:
raise Failed("Collection Error: {} collection_order Invalid\n| \trelease (Order Collection by release dates)\n| \talpha (Order Collection Alphabetically)".format(data[m]))
elif method_name == "url_poster":
self.posters.append(("url", data[m], method_name))
elif method_name == "tmdb_poster":
self.posters.append(("url", "{}{}".format(config.TMDb.image_url, config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).poster_path), method_name))
elif method_name == "tmdb_profile":
self.posters.append(("url", "{}{}".format(config.TMDb.image_url, config.TMDb.get_person(util.regex_first_int(data[m], "TMDb Person ID")).profile_path), method_name))
elif method_name == "file_poster":
if os.path.exists(data[m]): self.posters.append(("file", os.path.abspath(data[m]), method_name))
else: raise Failed("Collection Error: Poster Path Does Not Exist: {}".format(os.path.abspath(data[m])))
elif method_name == "url_background":
self.backgrounds.append(("url", data[m], method_name))
elif method_name == "tmdb_background":
self.backgrounds.append(("url", "{}{}".format(config.TMDb.image_url, config.TMDb.get_movie_show_or_collection(util.regex_first_int(data[m], "TMDb ID"), self.library.is_movie).poster_path), method_name))
elif method_name == "file_background":
if os.path.exists(data[m]): self.backgrounds.append(("file", os.path.abspath(data[m]), method_name))
else: raise Failed("Collection Error: Background Path Does Not Exist: {}".format(os.path.abspath(data[m])))
elif method_name == "label_sync_mode":
if data[m] in ["append", "sync"]: self.details[method_name] = data[m]
else: raise Failed("Collection Error: label_sync_mode attribute must be either 'append' or 'sync'")
elif method_name in ["arr_tag", "label"]:
self.details[method_name] = util.get_list(data[m])
elif method_name in util.boolean_details:
if isinstance(data[m], bool): self.details[method_name] = data[m]
else: raise Failed("Collection Error: {} attribute must be either true or false".format(method_name))
elif method_name in util.all_details:
self.details[method_name] = data[m]
elif method_name in ["year", "year.not"]:
self.methods.append(("plex_search", [[(method_name, util.get_year_list(data[m], method_name))]]))
elif method_name in ["decade", "decade.not"]:
self.methods.append(("plex_search", [[(method_name, util.get_int_list(data[m], util.remove_not(method_name)))]]))
elif method_name in util.tmdb_searches:
final_values = []
for value in util.get_list(data[m]):
if value.lower() == "tmdb" and "tmdb_person" in self.details:
for name in self.details["tmdb_person"]:
final_values.append(name)
else:
final_values.append(value)
self.methods.append(("plex_search", [[(method_name, final_values)]]))
elif method_name in util.plex_searches:
self.methods.append(("plex_search", [[(method_name, util.get_list(data[m]))]]))
elif method_name == "plex_all":
self.methods.append((method_name, [""]))
elif method_name == "plex_collection":
self.methods.append((method_name, self.library.validate_collections(data[m] if isinstance(data[m], list) else [data[m]])))
elif method_name == "anidb_popular":
list_count = util.regex_first_int(data[m], "List Size", default=40)
if 1 <= list_count <= 30:
self.methods.append((method_name, [list_count]))
else:
logger.warning("Collection Error: anidb_popular must be an integer between 1 and 30 defaulting to 30")
self.methods.append((method_name, [30]))
elif method_name == "mal_id":
self.methods.append((method_name, util.get_int_list(data[m], "MyAnimeList ID")))
elif method_name in ["anidb_id", "anidb_relation"]:
self.methods.append((method_name, config.AniDB.validate_anidb_list(util.get_int_list(data[m], "AniDB ID"), self.library.Plex.language)))
elif method_name == "trakt_list":
self.methods.append((method_name, config.Trakt.validate_trakt_list(util.get_list(data[m]))))
elif method_name == "trakt_watchlist":
self.methods.append((method_name, config.Trakt.validate_trakt_watchlist(util.get_list(data[m]), self.library.is_movie)))
elif method_name == "imdb_list":
new_list = []
for imdb_list in util.get_list(data[m], split=False):
new_dictionary = {}
if isinstance(imdb_list, dict):
if "url" in imdb_list and imdb_list["url"]: imdb_url = imdb_list["url"]
else: raise Failed("Collection Error: imdb_list attribute url is required")
list_count = util.regex_first_int(imdb_list["limit"], "List Limit", default=0) if "limit" in imdb_list and imdb_list["limit"] else 0
else:
imdb_url = str(imdb_list)
list_count = 0
new_list.append({"url": imdb_url, "limit": list_count})
self.methods.append((method_name, new_list))
elif method_name in util.dictionary_lists:
if isinstance(data[m], dict):
def get_int(parent, method, data, default, min=1, max=None):
if method not in data: logger.warning("Collection Warning: {} {} attribute not found using {} as default".format(parent, method, default))
elif not data[method]: logger.warning("Collection Warning: {} {} attribute is blank using {} as default".format(parent, method, default))
elif isinstance(data[method], int) and data[method] >= min:
if max is None or data[method] <= max: return data[method]
else: logger.warning("Collection Warning: {} {} attribute {} invalid must an integer <= {} using {} as default".format(parent, method, data[method], max, default))
else: logger.warning("Collection Warning: {} {} attribute {} invalid must an integer >= {} using {} as default".format(parent, method, data[method], min, default))
return default
if method_name == "filters":
for f in data[m]:
if f in util.method_alias or (f.endswith(".not") and f[:-4] in util.method_alias):
filter = (util.method_alias[f[:-4]] + f[-4:]) if f.endswith(".not") else util.method_alias[f]
logger.warning("Collection Warning: {} filter will run as {}".format(f, filter))
else:
filter = f
if filter in util.movie_only_filters and self.library.is_show: raise Failed("Collection Error: {} filter only works for movie libraries".format(filter))
elif data[m][f] is None: raise Failed("Collection Error: {} filter is blank".format(filter))
elif filter in util.all_filters: self.filters.append((filter, data[m][f]))
else: raise Failed("Collection Error: {} filter not supported".format(filter))
elif method_name == "plex_collectionless":
new_dictionary = {}
prefix_list = []
if "exclude_prefix" in data[m] and data[m]["exclude_prefix"]:
if isinstance(data[m]["exclude_prefix"], list): prefix_list.extend(data[m]["exclude_prefix"])
else: prefix_list.append(str(data[m]["exclude_prefix"]))
exact_list = []
if "exclude" in data[m] and data[m]["exclude"]:
if isinstance(data[m]["exclude"], list): exact_list.extend(data[m]["exclude"])
else: exact_list.append(str(data[m]["exclude"]))
if len(prefix_list) == 0 and len(exact_list) == 0: raise Failed("Collection Error: you must have at least one exclusion")
self.details["add_to_arr"] = False
self.details["collection_mode"] = "hide"
self.sync = True
new_dictionary["exclude_prefix"] = prefix_list
new_dictionary["exclude"] = exact_list
self.methods.append((method_name, [new_dictionary]))
elif method_name == "plex_search":
searches = []
used = []
for s in data[m]:
if s in util.method_alias or (s.endswith(".not") and s[:-4] in util.method_alias):
search = (util.method_alias[s[:-4]] + s[-4:]) if s.endswith(".not") else util.method_alias[s]
logger.warning("Collection Warning: {} plex search attribute will run as {}".format(s, search))
else:
search = s
if search in util.movie_only_searches and self.library.is_show:
raise Failed("Collection Error: {} plex search attribute only works for movie libraries".format(search))
elif util.remove_not(search) in used:
raise Failed("Collection Error: Only one instance of {} can be used try using it as a filter instead".format(search))
elif search in ["year", "year.not"]:
years = util.get_year_list(data[m][s], search)
if len(years) > 0:
used.append(util.remove_not(search))
searches.append((search, util.get_int_list(data[m][s], util.remove_not(search))))
elif search in util.plex_searches:
used.append(util.remove_not(search))
searches.append((search, util.get_list(data[m][s])))
else:
logger.error("Collection Error: {} plex search attribute not supported".format(search))
self.methods.append((method_name, [searches]))
elif method_name == "tmdb_discover":
new_dictionary = {"limit": 100}
for attr in data[m]:
if data[m][attr]:
attr_data = data[m][attr]
if (self.library.is_movie and attr in util.discover_movie) or (self.library.is_show and attr in util.discover_tv):
if attr == "language":
if re.compile("([a-z]{2})-([A-Z]{2})").match(str(attr_data)):
new_dictionary[attr] = str(attr_data)
else:
raise Failed("Collection Error: {} attribute {}: {} must match pattern ([a-z]{2})-([A-Z]{2}) e.g. en-US".format(m, attr, attr_data))
elif attr == "region":
if re.compile("^[A-Z]{2}$").match(str(attr_data)):
new_dictionary[attr] = str(attr_data)
else:
raise Failed("Collection Error: {} attribute {}: {} must match pattern ^[A-Z]{2}$ e.g. US".format(m, attr, attr_data))
elif attr == "sort_by":
if (self.library.is_movie and attr_data in util.discover_movie_sort) or (self.library.is_show and attr_data in util.discover_tv_sort):
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: {} is invalid".format(m, attr, attr_data))
elif attr == "certification_country":
if "certification" in data[m] or "certification.lte" in data[m] or "certification.gte" in data[m]:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be used with either certification, certification.lte, or certification.gte".format(m, attr))
elif attr in ["certification", "certification.lte", "certification.gte"]:
if "certification_country" in data[m]:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be used with certification_country".format(m, attr))
elif attr in ["include_adult", "include_null_first_air_dates", "screened_theatrically"]:
if attr_data is True:
new_dictionary[attr] = attr_data
elif attr in ["primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte", "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte"]:
if re.compile("[0-1]?[0-9][/-][0-3]?[0-9][/-][1-2][890][0-9][0-9]").match(str(attr_data)):
the_date = str(attr_data).split("/") if "/" in str(attr_data) else str(attr_data).split("-")
new_dictionary[attr] = "{}-{}-{}".format(the_date[2], the_date[0], the_date[1])
elif re.compile("[1-2][890][0-9][0-9][/-][0-1]?[0-9][/-][0-3]?[0-9]").match(str(attr_data)):
the_date = str(attr_data).split("/") if "/" in str(attr_data) else str(attr_data).split("-")
new_dictionary[attr] = "{}-{}-{}".format(the_date[0], the_date[1], the_date[2])
else:
raise Failed("Collection Error: {} attribute {}: {} must match pattern MM/DD/YYYY e.g. 12/25/2020".format(m, attr, attr_data))
elif attr in ["primary_release_year", "year", "first_air_date_year"]:
if isinstance(attr_data, int) and 1800 < attr_data and attr_data < 2200:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be a valid year e.g. 1990".format(m, attr))
elif attr in ["vote_count.gte", "vote_count.lte", "vote_average.gte", "vote_average.lte", "with_runtime.gte", "with_runtime.lte"]:
if (isinstance(attr_data, int) or isinstance(attr_data, float)) and 0 < attr_data:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be a valid number greater then 0".format(m, attr))
elif attr in ["with_cast", "with_crew", "with_people", "with_companies", "with_networks", "with_genres", "without_genres", "with_keywords", "without_keywords", "with_original_language", "timezone"]:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {} not supported".format(m, attr))
elif attr == "limit":
if isinstance(attr_data, int) and attr_data > 0:
new_dictionary[attr] = attr_data
else:
raise Failed("Collection Error: {} attribute {}: must be a valid number greater then 0".format(m, attr))
else:
raise Failed("Collection Error: {} attribute {} not supported".format(m, attr))
else:
raise Failed("Collection Error: {} parameter {} is blank".format(m, attr))
if len(new_dictionary) > 1:
self.methods.append((method_name, [new_dictionary]))
else:
raise Failed("Collection Error: {} had no valid fields".format(m))
elif "tautulli" in method_name:
new_dictionary = {}
if method_name == "tautulli_popular": new_dictionary["list_type"] = "popular"
elif method_name == "tautulli_watched": new_dictionary["list_type"] = "watched"
else: raise Failed("Collection Error: {} attribute not supported".format(method_name))
new_dictionary["list_days"] = get_int(method_name, "list_days", data[m], 30)
new_dictionary["list_size"] = get_int(method_name, "list_size", data[m], 10)
new_dictionary["list_buffer"] = get_int(method_name, "list_buffer", data[m], 20)
self.methods.append((method_name, [new_dictionary]))
elif method_name == "mal_season":
new_dictionary = {"sort_by": "anime_num_list_users"}
if "sort_by" not in data[m]: logger.warning("Collection Warning: mal_season sort_by attribute not found using members as default")
elif not data[m]["sort_by"]: logger.warning("Collection Warning: mal_season sort_by attribute is blank using members as default")
elif data[m]["sort_by"] not in util.mal_season_sort: logger.warning("Collection Warning: mal_season sort_by attribute {} invalid must be either 'members' or 'score' using members as default".format(data[m]["sort_by"]))
else: new_dictionary["sort_by"] = util.mal_season_sort[data[m]["sort_by"]]
current_time = datetime.now()
if current_time.month in [1, 2, 3]: new_dictionary["season"] = "winter"
elif current_time.month in [4, 5, 6]: new_dictionary["season"] = "spring"
elif current_time.month in [7, 8, 9]: new_dictionary["season"] = "summer"
elif current_time.month in [10, 11, 12]: new_dictionary["season"] = "fall"
if "season" not in data[m]: logger.warning("Collection Warning: mal_season season attribute not found using the current season: {} as default".format(new_dictionary["season"]))
elif not data[m]["season"]: logger.warning("Collection Warning: mal_season season attribute is blank using the current season: {} as default".format(new_dictionary["season"]))
elif data[m]["season"] not in util.pretty_seasons: logger.warning("Collection Warning: mal_season season attribute {} invalid must be either 'winter', 'spring', 'summer' or 'fall' using the current season: {} as default".format(data[m]["season"], new_dictionary["season"]))
else: new_dictionary["season"] = data[m]["season"]
new_dictionary["year"] = get_int(method_name, "year", data[m], current_time.year, min=1917, max=current_time.year + 1)
new_dictionary["limit"] = get_int(method_name, "limit", data[m], 100, max=500)
self.methods.append((method_name, [new_dictionary]))
elif method_name == "mal_userlist":
new_dictionary = {"status": "all", "sort_by": "list_score"}
if "username" not in data[m]: raise Failed("Collection Error: mal_userlist username attribute is required")
elif not data[m]["username"]: raise Failed("Collection Error: mal_userlist username attribute is blank")
else: new_dictionary["username"] = data[m]["username"]
if "status" not in data[m]: logger.warning("Collection Warning: mal_season status attribute not found using all as default")
elif not data[m]["status"]: logger.warning("Collection Warning: mal_season status attribute is blank using all as default")
elif data[m]["status"] not in util.mal_userlist_status: logger.warning("Collection Warning: mal_season status attribute {} invalid must be either 'all', 'watching', 'completed', 'on_hold', 'dropped' or 'plan_to_watch' using all as default".format(data[m]["status"]))
else: new_dictionary["status"] = util.mal_userlist_status[data[m]["status"]]
if "sort_by" not in data[m]: logger.warning("Collection Warning: mal_season sort_by attribute not found using score as default")
elif not data[m]["sort_by"]: logger.warning("Collection Warning: mal_season sort_by attribute is blank using score as default")
elif data[m]["sort_by"] not in util.mal_userlist_sort: logger.warning("Collection Warning: mal_season sort_by attribute {} invalid must be either 'score', 'last_updated', 'title' or 'start_date' using score as default".format(data[m]["sort_by"]))
else: new_dictionary["sort_by"] = util.mal_userlist_sort[data[m]["sort_by"]]
new_dictionary["limit"] = get_int(method_name, "limit", data[m], 100, max=1000)
self.methods.append((method_name, [new_dictionary]))
else:
raise Failed("Collection Error: {} attribute is not a dictionary: {}".format(m, data[m]))
elif method_name in util.count_lists:
list_count = util.regex_first_int(data[m], "List Size", default=20)
if list_count < 1:
logger.warning("Collection Warning: {} must be an integer greater then 0 defaulting to 20".format(method_name))
list_count = 20
self.methods.append((method_name, [list_count]))
elif method_name in util.tmdb_lists:
values = config.TMDb.validate_tmdb_list(util.get_int_list(data[m], "TMDb {} ID".format(util.tmdb_type[method_name])), util.tmdb_type[method_name])
if method_name[-8:] == "_details":
if method_name in ["tmdb_collection_details", "tmdb_movie_details", "tmdb_show_details"]:
item = config.TMDb.get_movie_show_or_collection(values[0], self.library.is_movie)
if "summary" not in self.details and hasattr(item, "overview") and item.overview:
self.details["summary"] = item.overview
if "background" not in self.details and hasattr(item, "backdrop_path") and item.backdrop_path:
self.details["background"] = ("url", "{}{}".format(config.TMDb.image_url, item.backdrop_path), method_name[:-8])
if "poster" not in self.details and hasattr(item, "poster_path") and item.poster_path:
self.details["poster"] = ("url", "{}{}".format(config.TMDb.image_url, item.poster_path), method_name[:-8])
else:
item = config.TMDb.get_list(values[0])
if "summary" not in self.details and hasattr(item, "description") and item.description:
self.details["summary"] = item.description
self.methods.append((method_name[:-8], values))
else:
self.methods.append((method_name, values))
elif method_name in util.all_lists:
self.methods.append((method_name, util.get_list(data[m])))
elif method_name not in util.other_attributes:
raise Failed("Collection Error: {} attribute not supported".format(method_name))
else:
raise Failed("Collection Error: {} attribute is blank".format(m))
self.do_arr = False
if self.library.Radarr:
self.do_arr = self.details["add_to_arr"] if "add_to_arr" in self.details else self.library.Radarr.add
if self.library.Sonarr:
self.do_arr = self.details["add_to_arr"] if "add_to_arr" in self.details else self.library.Sonarr.add
def run_methods(self, collection_obj, collection_name, map, movie_map, show_map):
items_found = 0
for method, values in self.methods:
logger.debug("")
logger.debug("Method: {}".format(method))
logger.debug("Values: {}".format(values))
pretty = util.pretty_names[method] if method in util.pretty_names else method
for value in values:
items = []
missing_movies = []
missing_shows = []
def check_map(input_ids):
movie_ids, show_ids = input_ids
items_found_inside = 0
if len(movie_ids) > 0:
items_found_inside += len(movie_ids)
for movie_id in movie_ids:
if movie_id in movie_map: items.append(movie_map[movie_id])
else: missing_movies.append(movie_id)
if len(show_ids) > 0:
items_found_inside += len(show_ids)
for show_id in show_ids:
if show_id in show_map: items.append(show_map[show_id])
else: missing_shows.append(show_id)
return items_found_inside
logger.info("")
logger.debug("Value: {}".format(value))
if method == "plex_all":
logger.info("Processing {} {}".format(pretty, "Movies" if self.library.is_movie else "Shows"))
items = self.library.Plex.all()
items_found += len(items)
elif method == "plex_collection":
items = value.items()
items_found += len(items)
elif method == "plex_search":
search_terms = {}
output = ""
for i, attr_pair in enumerate(value):
search_list = attr_pair[1]
final_method = attr_pair[0][:-4] + "!" if attr_pair[0][-4:] == ".not" else attr_pair[0]
if self.library.is_show:
final_method = "show." + final_method
search_terms[final_method] = search_list
ors = ""
for o, param in enumerate(attr_pair[1]):
ors += "{}{}".format(" OR " if o > 0 else "{}(".format(attr_pair[0]), param)
logger.info("\t\t AND {})".format(ors) if i > 0 else "Processing {}: {})".format(pretty, ors))
items = self.library.Plex.search(**search_terms)
items_found += len(items)
elif method == "plex_collectionless":
good_collections = []
for col in self.library.get_all_collections():
keep_collection = True
for pre in value["exclude_prefix"]:
if col.title.startswith(pre) or (col.titleSort and col.titleSort.startswith(pre)):
keep_collection = False
break
for ext in value["exclude"]:
if col.title == ext or (col.titleSort and col.titleSort == ext):
keep_collection = False
break
if keep_collection:
good_collections.append(col.title.lower())
all_items = self.library.Plex.all()
length = 0
for i, item in enumerate(all_items, 1):
length = util.print_return(length, "Processing: {}/{} {}".format(i, len(all_items), item.title))
add_item = True
for collection in item.collections:
if collection.tag.lower() in good_collections:
add_item = False
break
if add_item:
items.append(item)
items_found += len(items)
util.print_end(length, "Processed {} {}".format(len(all_items), "Movies" if self.library.is_movie else "Shows"))
elif "tautulli" in method:
items = self.library.Tautulli.get_items(self.library, time_range=value["list_days"], stats_count=value["list_size"], list_type=value["list_type"], stats_count_buffer=value["list_buffer"])
items_found += len(items)
elif "anidb" in method: items_found += check_map(self.config.AniDB.get_items(method, value, self.library.Plex.language))
elif "mal" in method: items_found += check_map(self.config.MyAnimeList.get_items(method, value))
elif "tvdb" in method: items_found += check_map(self.config.TVDb.get_items(method, value, self.library.Plex.language))
elif "imdb" in method: items_found += check_map(self.config.IMDb.get_items(method, value, self.library.Plex.language))
elif "tmdb" in method: items_found += check_map(self.config.TMDb.get_items(method, value, self.library.is_movie))
elif "trakt" in method: items_found += check_map(self.config.Trakt.get_items(method, value, self.library.is_movie))
else: logger.error("Collection Error: {} method not supported".format(method))
if len(items) > 0: map = self.library.add_to_collection(collection_obj if collection_obj else collection_name, items, self.filters, self.details["show_filtered"], map, movie_map, show_map)
else: logger.error("No items found to add to this collection ")
if len(missing_movies) > 0 or len(missing_shows) > 0:
logger.info("")
if len(missing_movies) > 0:
not_lang = None
terms = None
for filter_method, filter_data in self.filters:
if filter_method.startswith("original_language"):
terms = util.get_list(filter_data, lower=True)
not_lang = filter_method.endswith(".not")
break
missing_movies_with_names = []
for missing_id in missing_movies:
try:
movie = self.config.TMDb.get_movie(missing_id)
title = str(movie.title)
if not_lang is None or (not_lang is True and movie.original_language not in terms) or (not_lang is False and movie.original_language in terms):
missing_movies_with_names.append((title, missing_id))
if self.details["show_missing"] is True:
logger.info("{} Collection | ? | {} (TMDb: {})".format(collection_name, title, missing_id))
elif self.details["show_filtered"] is True:
logger.info("{} Collection | X | {} (TMDb: {})".format(collection_name, title, missing_id))
except Failed as e:
logger.error(e)
logger.info("{} Movie{} Missing".format(len(missing_movies_with_names), "s" if len(missing_movies_with_names) > 1 else ""))
if self.details["save_missing"] is True:
self.library.add_missing(collection_name, missing_movies_with_names, True)
if self.do_arr and self.library.Radarr:
self.library.Radarr.add_tmdb([missing_id for title, missing_id in missing_movies_with_names], tag=self.details["arr_tag"])
if len(missing_shows) > 0 and self.library.is_show:
missing_shows_with_names = []
for missing_id in missing_shows:
try:
title = str(self.config.TVDb.get_series(self.library.Plex.language, tvdb_id=missing_id).title.encode("ascii", "replace").decode())
missing_shows_with_names.append((title, missing_id))
if self.details["show_missing"] is True:
logger.info("{} Collection | ? | {} (TVDB: {})".format(collection_name, title, missing_id))
except Failed as e:
logger.error(e)
logger.info("{} Show{} Missing".format(len(missing_shows_with_names), "s" if len(missing_shows_with_names) > 1 else ""))
if self.details["save_missing"] is True:
self.library.add_missing(collection_name, missing_shows_with_names, False)
if self.do_arr and self.library.Sonarr:
self.library.Sonarr.add_tvdb([missing_id for title, missing_id in missing_shows_with_names], tag=self.details["arr_tag"])
if self.sync and items_found > 0:
logger.info("")
count_removed = 0
for ratingKey, item in map.items():
if item is not None:
logger.info("{} Collection | - | {}".format(collection_name, item.title))
item.removeCollection(collection_name)
count_removed += 1
logger.info("{} {}{} Removed".format(count_removed, "Movie" if self.library.is_movie else "Show", "s" if count_removed == 1 else ""))
logger.info("")
def update_details(self, collection):
edits = {}
if "sort_title" in self.details:
edits["titleSort.value"] = self.details["sort_title"]
edits["titleSort.locked"] = 1
if "content_rating" in self.details:
edits["contentRating.value"] = self.details["content_rating"]
edits["contentRating.locked"] = 1
if "summary" in self.details:
edits["summary.value"] = self.details["summary"]
edits["summary.locked"] = 1
if len(edits) > 0:
logger.debug(edits)
collection.edit(**edits)
collection.reload()
logger.info("Details: have been updated")
if "collection_mode" in self.details:
collection.modeUpdate(mode=self.details["collection_mode"])
if "collection_order" in self.details:
collection.sortUpdate(sort=self.details["collection_order"])
if "label" in self.details:
item_labels = [label.tag for label in collection.labels]
labels = util.get_list(self.details["label"])
if "label_sync_mode" in self.details and self.details["label_sync_mode"] == "sync":
for label in (l for l in item_labels if l not in labels):
collection.removeLabel(label)
logger.info("Detail: Label {} removed".format(label))
for label in (l for l in labels if l not in item_labels):
collection.addLabel(label)
logger.info("Detail: Label {} added".format(label))
if self.library.asset_directory:
name_mapping = self.name
if "name_mapping" in self.details:
if self.details["name_mapping"]: name_mapping = self.details["name_mapping"]
else: logger.error("Collection Error: name_mapping attribute is blank")
for ad in self.library.asset_directory:
path = os.path.join(ad, "{}".format(name_mapping))
if not os.path.isdir(path):
continue
matches = glob.glob(os.path.join(ad, "{}".format(name_mapping), "poster.*"))
if len(matches) > 0:
for match in matches:
self.posters.append(("file", os.path.abspath(match), "asset_directory"))
matches = glob.glob(os.path.join(ad, "{}".format(name_mapping), "background.*"))
if len(matches) > 0:
for match in matches:
self.backgrounds.append(("file", os.path.abspath(match), "asset_directory"))
dirs = [folder for folder in os.listdir(path) if os.path.isdir(os.path.join(path, folder))]
if len(dirs) > 0:
for item in collection.items():
folder = os.path.basename(os.path.dirname(item.locations[0]))
if folder in dirs:
matches = glob.glob(os.path.join(path, folder, "poster.*"))
poster_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
matches = glob.glob(os.path.join(path, folder, "background.*"))
background_path = os.path.abspath(matches[0]) if len(matches) > 0 else None
if poster_path:
item.uploadPoster(filepath=poster_path)
logger.info("Detail: asset_directory updated {}'s poster to [file] {}".format(item.title, poster_path))
if background_path:
item.uploadArt(filepath=background_path)
logger.info("Detail: asset_directory updated {}'s background to [file] {}".format(item.title, background_path))
if poster_path is None and background_path is None:
logger.warning("No Files Found: {}".format(os.path.join(path, folder)))
else:
logger.warning("No Folder: {}".format(os.path.join(path, folder)))
poster = util.choose_from_list(self.posters, "poster", list_type="tuple")
if not poster and "poster" in self.details: poster = self.details["poster"]
if poster:
if poster[0] == "url": collection.uploadPoster(url=poster[1])
else: collection.uploadPoster(filepath=poster[1])
logger.info("Detail: {} updated collection poster to [{}] {}".format(poster[2], poster[0], poster[1]))
background = util.choose_from_list(self.backgrounds, "background", list_type="tuple")
if not background and "background" in self.details: background = self.details["background"]
if background:
if background[0] == "url": collection.uploadArt(url=background[1])
else: collection.uploadArt(filepath=background[1])
logger.info("Detail: {} updated collection background to [{}] {}".format(background[2], background[0], background[1]))

File diff suppressed because it is too large


@ -17,7 +17,7 @@ class MyAnimeListIDList:
def convert_mal(self, input_id, from_id, to_id):
for attrs in self.ids:
if from_id in attrs and int(attrs[from_id]) == int(input_id) and to_id in attrs and int(attrs[to_id]) > 0:
return attrs[to_id]
return int(attrs[to_id])
raise Failed("MyAnimeList Error: {} ID not found for {}: {}".format(util.pretty_ids[to_id], util.pretty_ids[from_id], input_id))
def find_mal_ids(self, mal_id):


@ -1,9 +1,6 @@
import datetime, logging, os, requests
from lxml import html
from modules import util
from modules.radarr import RadarrAPI
from modules.sonarr import SonarrAPI
from modules.tautulli import TautulliAPI
from modules.util import Failed
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
from plexapi.library import Collections, MovieSection, ShowSection
@ -15,7 +12,7 @@ from ruamel import yaml
logger = logging.getLogger("Plex Meta Manager")
class PlexAPI:
def __init__(self, params):
def __init__(self, params, TMDb, TVDb):
try: self.PlexServer = PlexServer(params["plex"]["url"], params["plex"]["token"], timeout=600)
except Unauthorized: raise Failed("Plex Error: Plex token is invalid")
except ValueError as e: raise Failed("Plex Error: {}".format(e))
@ -29,59 +26,52 @@ class PlexAPI:
try: self.data, ind, bsi = yaml.util.load_yaml_guess_indent(open(params["metadata_path"], encoding="utf-8"))
except yaml.scanner.ScannerError as e: raise Failed("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
self.metadata = None
if "metadata" in self.data:
if self.data["metadata"]: self.metadata = self.data["metadata"]
else: logger.warning("Config Warning: metadata attribute is blank")
else: logger.warning("Config Warning: metadata attribute not found")
def get_dict(attribute):
if attribute in self.data:
if self.data[attribute]:
if isinstance(self.data[attribute], dict): return self.data[attribute]
else: logger.warning("Config Warning: {} must be a dictionary".format(attribute))
else: logger.warning("Config Warning: {} attribute is blank".format(attribute))
return None
self.collections = None
if "collections" in self.data:
if self.data["collections"]: self.collections = self.data["collections"]
else: logger.warning("Config Warning: collections attribute is blank")
else: logger.warning("Config Warning: collections attribute not found")
self.metadata = get_dict("metadata")
self.templates = get_dict("templates")
self.collections = get_dict("collections")
if self.metadata is None and self.collections is None:
raise Failed("YAML Error: metadata attributes or collections attribute required")
if params["asset_directory"]:
logger.info("Using Asset Directory: {}".format(params["asset_directory"]))
for ad in params["asset_directory"]:
logger.info("Using Asset Directory: {}".format(ad))
self.TMDb = TMDb
self.TVDb = TVDb
self.Radarr = None
if params["tmdb"] and params["radarr"]:
logger.info("Connecting to {} library's Radarr...".format(params["name"]))
try: self.Radarr = RadarrAPI(params["tmdb"], params["radarr"])
except Failed as e: logger.error(e)
logger.info("{} library's Radarr Connection {}".format(params["name"], "Failed" if self.Radarr is None else "Successful"))
self.Sonarr = None
if params["tvdb"] and params["sonarr"]:
logger.info("Connecting to {} library's Sonarr...".format(params["name"]))
try: self.Sonarr = SonarrAPI(params["tvdb"], params["sonarr"], self.Plex.language)
except Failed as e: logger.error(e)
logger.info("{} library's Sonarr Connection {}".format(params["name"], "Failed" if self.Sonarr is None else "Successful"))
self.Tautulli = None
if params["tautulli"]:
logger.info("Connecting to {} library's Tautulli...".format(params["name"]))
try: self.Tautulli = TautulliAPI(params["tautulli"])
except Failed as e: logger.error(e)
logger.info("{} library's Tautulli Connection {}".format(params["name"], "Failed" if self.Tautulli is None else "Successful"))
self.TMDb = params["tmdb"]
self.TVDb = params["tvdb"]
self.name = params["name"]
self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), "{}_missing.yml".format(os.path.splitext(os.path.basename(params["metadata_path"]))[0]))
self.metadata_path = params["metadata_path"]
self.asset_directory = params["asset_directory"]
self.sync_mode = params["sync_mode"]
self.show_unmanaged = params["show_unmanaged"]
self.show_filtered = params["show_filtered"]
self.show_missing = params["show_missing"]
self.save_missing = params["save_missing"]
self.plex = params["plex"]
self.radarr = params["radarr"]
self.sonarr = params["sonarr"]
self.tautulli = params["tautulli"]
self.missing = {}
def add_Radarr(self, Radarr):
self.Radarr = Radarr
def add_Sonarr(self, Sonarr):
self.Sonarr = Sonarr
def add_Tautulli(self, Tautulli):
self.Tautulli = Tautulli
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def search(self, title, libtype=None, year=None):
@ -115,51 +105,18 @@ class PlexAPI:
raise Failed("Collection Error: No valid Plex Collections in {}".format(collections[c][m]))
return valid_collections
def del_collection_if_empty(self, collection):
missing_data = {}
if not os.path.exists(self.missing_path):
with open(self.missing_path, "w"): pass
def add_missing(self, collection, items, is_movie):
col_name = collection.encode("ascii", "replace").decode()
if col_name not in self.missing:
self.missing[col_name] = {}
section = "Movies Missing (TMDb IDs)" if is_movie else "Shows Missing (TVDb IDs)"
if section not in self.missing[col_name]:
self.missing[col_name][section] = {}
for title, item_id in items:
self.missing[col_name][section][int(item_id)] = str(title).encode("ascii", "replace").decode()
with open(self.missing_path, "w"): pass
try:
missing_data, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.missing_path))
if not missing_data:
missing_data = {}
if collection in missing_data and len(missing_data[collection]) == 0:
del missing_data[collection]
yaml.round_trip_dump(missing_data, open(self.missing_path, "w"), indent=ind, block_seq_indent=bsi)
except yaml.scanner.ScannerError as e:
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
def clear_collection_missing(self, collection):
missing_data = {}
if not os.path.exists(self.missing_path):
with open(self.missing_path, "w"): pass
try:
missing_data, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.missing_path))
if not missing_data:
missing_data = {}
if collection in missing_data:
missing_data[collection.encode("ascii", "replace").decode()] = {}
yaml.round_trip_dump(missing_data, open(self.missing_path, "w"), indent=ind, block_seq_indent=bsi)
except yaml.scanner.ScannerError as e:
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
def save_missing(self, collection, items, is_movie):
missing_data = {}
if not os.path.exists(self.missing_path):
with open(self.missing_path, "w"): pass
try:
missing_data, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.missing_path))
if not missing_data:
missing_data = {}
col_name = collection.encode("ascii", "replace").decode()
if col_name not in missing_data:
missing_data[col_name] = {}
section = "Movies Missing (TMDb IDs)" if is_movie else "Shows Missing (TVDb IDs)"
if section not in missing_data[col_name]:
missing_data[col_name][section] = {}
for title, item_id in items:
missing_data[col_name][section][int(item_id)] = str(title).encode("ascii", "replace").decode()
yaml.round_trip_dump(missing_data, open(self.missing_path, "w"), indent=ind, block_seq_indent=bsi)
yaml.round_trip_dump(self.missing, open(self.missing_path, "w"))
except yaml.scanner.ScannerError as e:
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
@ -170,8 +127,11 @@ class PlexAPI:
max_length = len(str(total))
length = 0
for i, item in enumerate(items, 1):
try: current = self.fetchItem(item.ratingKey if isinstance(item, (Movie, Show)) else int(item))
except BadRequest: raise Failed("Plex Error: Item {} not found".format(item))
try:
current = self.fetchItem(item.ratingKey if isinstance(item, (Movie, Show)) else int(item))
except (BadRequest, NotFound):
logger.error("Plex Error: Item {} not found".format(item))
continue
match = True
if filters:
length = util.print_return(length, "Filtering {}/{} {}".format((" " * (max_length - len(str(i)))) + str(i), total, current.title))
@ -185,7 +145,7 @@ class PlexAPI:
match = False
break
elif method == "original_language":
terms = f[1] if isinstance(f[1], list) else [lang.lower() for lang in str(f[1]).split(", ")]
terms = util.get_list(f[1], lower=True)
tmdb_id = None
movie = None
for key, value in movie_map.items():
@ -214,7 +174,7 @@ class PlexAPI:
match = False
break
else:
terms = f[1] if isinstance(f[1], list) else str(f[1]).split(", ")
terms = util.get_list(f[1])
if method in ["video_resolution", "audio_language", "subtitle_language"]:
for media in current.media:
if method == "video_resolution": attrs = [media.videoResolution]
@ -241,13 +201,15 @@ class PlexAPI:
def search_item(self, data, year=None):
return util.choose_from_list(self.search(data, year=year), "movie" if self.is_movie else "show", str(data), exact=True)
def update_metadata(self, TMDb):
def update_metadata(self, TMDb, test):
logger.info("")
util.seperator("{} Library Metadata".format(self.name))
logger.info("")
if not self.metadata:
raise Failed("No metadata to edit")
for m in self.metadata:
if test and ("test" not in self.metadata[m] or self.metadata[m]["test"] is not True):
continue
logger.info("")
util.seperator()
logger.info("")
@ -316,10 +278,11 @@ class PlexAPI:
add_edit("originally_available", str(item.originallyAvailableAt)[:-9], self.metadata[m], key="originallyAvailableAt", value=originally_available)
add_edit("rating", item.rating, self.metadata[m], value=rating)
add_edit("content_rating", item.contentRating, self.metadata[m], key="contentRating")
originalTitle = item.originalTitle if self.is_movie else item._data.attrib.get("originalTitle")
add_edit("original_title", originalTitle, self.metadata[m], key="originalTitle", value=original_title)
item_original_title = item.originalTitle if self.is_movie else item._data.attrib.get("originalTitle")
add_edit("original_title", item_original_title, self.metadata[m], key="originalTitle", value=original_title)
add_edit("studio", item.studio, self.metadata[m], value=studio)
add_edit("tagline", item.tagline, self.metadata[m], value=tagline)
item_tagline = item.tagline if self.is_movie else item._data.attrib.get("tagline")
add_edit("tagline", item_tagline, self.metadata[m], value=tagline)
add_edit("summary", item.summary, self.metadata[m], value=summary)
if len(edits) > 0:
logger.debug("Details Update: {}".format(edits))


@ -10,8 +10,7 @@ class RadarrAPI:
self.url_params = {"apikey": "{}".format(params["token"])}
self.base_url = "{}/api{}".format(params["url"], "/v3/" if params["version"] == "v3" else "/")
try:
response = requests.get("{}system/status".format(self.base_url), params=self.url_params)
result = response.json()
result = requests.get("{}system/status".format(self.base_url), params=self.url_params).json()
except Exception as e:
util.print_stacktrace()
raise Failed("Radarr Error: Could not connect to Radarr at {}".format(params["url"]))
@ -19,10 +18,9 @@ class RadarrAPI:
raise Failed("Radarr Error: Invalid API Key")
if "version" not in result:
raise Failed("Radarr Error: Unexpected Response Check URL")
response = requests.get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile"), params=self.url_params)
self.quality_profile_id = None
profiles = ""
for profile in response.json():
for profile in self.send_get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile")):
if len(profiles) > 0:
profiles += ", "
profiles += profile["name"]
@ -37,11 +35,24 @@ class RadarrAPI:
self.root_folder_path = params["root_folder_path"]
self.add = params["add"]
self.search = params["search"]
self.tag = params["tag"]
def add_tmdb(self, tmdb_ids):
def add_tmdb(self, tmdb_ids, tag=None):
logger.info("")
logger.debug("TMDb IDs: {}".format(tmdb_ids))
tag_nums = []
add_count = 0
if tag is None:
tag = self.tag
if tag:
tag_cache = {}
for label in tag:
self.send_post("{}tag".format(self.base_url), {"label": str(label)})
for t in self.send_get("{}tag".format(self.base_url)):
tag_cache[t["label"]] = t["id"]
for label in tag:
if label in tag_cache:
tag_nums.append(tag_cache[label])
for tmdb_id in tmdb_ids:
try:
movie = self.tmdb.get_movie(tmdb_id)
@ -74,6 +85,8 @@ class RadarrAPI:
"images": [{"covertype": "poster", "url": poster}],
"addOptions": {"searchForMovie": self.search}
}
if tag_nums:
url_json["tags"] = tag_nums
response = self.send_post("{}movie".format(self.base_url), url_json)
if response.status_code < 400:
logger.info("Added to Radarr | {:<6} | {}".format(tmdb_id, movie.title))
@ -86,6 +99,10 @@ class RadarrAPI:
logger.error("Radarr Error: {}".format(response.json()))
logger.info("{} Movie{} added to Radarr".format(add_count, "s" if add_count > 1 else ""))
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_get(self, url):
return requests.get(url, params=self.url_params).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_post(self, url, url_json):
return requests.post(url, json=url_json, params=self.url_params)
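The Radarr changes above add tag support: each configured label is posted to the tag endpoint, the tag list is read back to map labels to their numeric IDs, and those IDs are attached to the add payload. A rough standalone sketch of that lookup, mirroring the calls above (URL and API key are placeholders):

import requests

# Illustrative only: resolve tag labels to tag IDs the way add_tmdb above does,
# by posting each label to the tag endpoint and then reading the list back.
def resolve_tag_ids(base_url, apikey, labels):
    params = {"apikey": apikey}
    for label in labels:
        requests.post("{}tag".format(base_url), json={"label": str(label)}, params=params)
    tag_cache = {t["label"]: t["id"] for t in requests.get("{}tag".format(base_url), params=params).json()}
    return [tag_cache[label] for label in labels if label in tag_cache]

# Example (placeholder URL/key):
# payload["tags"] = resolve_tag_ids("http://192.168.1.12:7878/api/v3/", "################", ["pmm"])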


@ -10,8 +10,7 @@ class SonarrAPI:
self.url_params = {"apikey": "{}".format(params["token"])}
self.base_url = "{}/api{}".format(params["url"], "/v3/" if params["version"] == "v3" else "/")
try:
response = requests.get("{}system/status".format(self.base_url), params=self.url_params)
result = response.json()
result = requests.get("{}system/status".format(self.base_url), params=self.url_params).json()
except Exception as e:
util.print_stacktrace()
raise Failed("Sonarr Error: Could not connect to Sonarr at {}".format(params["url"]))
@ -19,10 +18,9 @@ class SonarrAPI:
raise Failed("Sonarr Error: Invalid API Key")
if "version" not in result:
raise Failed("Sonarr Error: Unexpected Response Check URL")
response = requests.get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile"), params=self.url_params)
self.quality_profile_id = None
profiles = ""
for profile in response.json():
for profile in self.send_get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile")):
if len(profiles) > 0:
profiles += ", "
profiles += profile["name"]
@@ -38,11 +36,24 @@ class SonarrAPI:
self.root_folder_path = params["root_folder_path"]
self.add = params["add"]
self.search = params["search"]
self.tag = params["tag"]
def add_tvdb(self, tvdb_ids):
def add_tvdb(self, tvdb_ids, tag=None):
logger.info("")
logger.debug("TVDb IDs: {}".format(tvdb_ids))
tag_nums = []
add_count = 0
if tag is None:
tag = self.tag
if tag:
tag_cache = {}
for label in tag:
self.send_post("{}tag".format(self.base_url), {"label": str(label)})
for t in self.send_get("{}tag".format(self.base_url)):
tag_cache[t["label"]] = t["id"]
for label in tag:
if label in tag_cache:
tag_nums.append(tag_cache[label])
for tvdb_id in tvdb_ids:
try:
show = self.tvdb.get_series(self.language, tvdb_id=tvdb_id)
@@ -65,6 +76,8 @@ class SonarrAPI:
"images": [{"covertype": "poster", "url": show.poster_path}],
"addOptions": {"searchForMissingEpisodes": self.search}
}
if tag_nums:
url_json["tags"] = tag_nums
response = self.send_post("{}series".format(self.base_url), url_json)
if response.status_code < 400:
logger.info("Added to Sonarr | {:<6} | {}".format(tvdb_id, show.title))
@@ -77,6 +90,10 @@ class SonarrAPI:
logger.error("Sonarr Error: {}".format(response.json()))
logger.info("{} Show{} added to Sonarr".format(add_count, "s" if add_count > 1 else ""))
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_get(self, url):
return requests.get(url, params=self.url_params).json()
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_post(self, url, url_json):
return requests.post(url, json=url_json, params=self.url_params)
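
Both RadarrAPI and SonarrAPI now route their calls through send_get/send_post, wrapped with the retrying package's retry decorator: up to six attempts with a fixed ten-second wait. The retry only fires on raised exceptions (connection failures, JSON decode errors), not on HTTP 4xx/5xx responses, since requests does not raise on those by default; that is why the caller still checks send_post's status code. A minimal sketch of the same pattern in isolation, with a placeholder API key:

import requests
from retrying import retry

URL_PARAMS = {"apikey": "placeholder"}  # hypothetical value for the sketch

@retry(stop_max_attempt_number=6, wait_fixed=10000)  # 6 attempts, 10,000 ms apart
def send_get(url):
    return requests.get(url, params=URL_PARAMS).json()

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def send_post(url, url_json):
    return requests.post(url, json=url_json, params=URL_PARAMS)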

View file

@@ -10,10 +10,8 @@ def run_tests(default_dir):
config = Config(default_dir)
logger.info("")
util.seperator("Mapping Tests")
config.map_guids(config.libraries[0])
config.map_guids(config.libraries[1])
config.map_guids(config.libraries[2])
for library in config.libraries:
config.map_guids(library)
anidb_tests(config)
imdb_tests(config)
mal_tests(config)

View file

@@ -38,7 +38,7 @@ class TMDbAPI:
def convert_to_tmdb(self, external_id, external_source, is_movie):
search_results = self.Movie.external(external_id=external_id, external_source=external_source)
search = search_results["movie_results" if is_movie else "tv_results"]
if len(search) == 1: return search[0]["id"]
if len(search) == 1: return int(search[0]["id"])
else: raise Failed("TMDb Error: No TMDb ID found for {} {}".format(external_source.upper().replace("B_", "b "), external_id))
def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_from_tmdb(tmdb_id, "imdb_id", is_movie)

View file

@@ -89,9 +89,9 @@ class TraktAPI:
lookup = Trakt["search"].lookup(external_id, from_source, media_type)
if lookup:
lookup = lookup[0] if isinstance(lookup, list) else lookup
return lookup.get_key(to_source)
else:
raise Failed("No {} ID found for {} ID {}".format(to_source.upper().replace("B", "b"), from_source.upper().replace("B", "b"), external_id))
if lookup.get_key(to_source):
return lookup.get_key(to_source) if to_source == "imdb" else int(lookup.get_key(to_source))
raise Failed("No {} ID found for {} ID {}".format(to_source.upper().replace("B", "b"), from_source.upper().replace("B", "b"), external_id))
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def trending(self, amount, is_movie):
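
The Trakt change above normalizes converted IDs: IMDb IDs stay strings (e.g. "tt0133093"), every other service's ID is cast to int, and a missing key now raises instead of silently returning None. A standalone sketch of that normalization, with Failed standing in for the exception class from modules/util.py and the argument names invented for the example:

class Failed(Exception):
    pass

def normalize_id(lookup, to_source, from_source, external_id):
    # lookup is the first Trakt search result; get_key returns its ID for the requested service.
    key = lookup.get_key(to_source)
    if not key:
        raise Failed("No {} ID found for {} ID {}".format(to_source, from_source, external_id))
    # IMDb IDs are alphanumeric strings; TMDb/TVDb/Trakt IDs are numeric.
    return key if to_source == "imdb" else int(key)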

View file

@@ -249,7 +249,14 @@ collectionless_lists = [
"collection_order", "plex_collectionless",
"url_poster", "tmdb_poster", "tmdb_profile", "file_poster",
"url_background", "file_background",
"name_mapping"
"name_mapping", "label", "label_sync_mode"
]
other_attributes = [
"schedule",
"sync_mode",
"template",
"test",
"tmdb_person"
]
dictionary_lists = [
"filters",
@@ -374,18 +381,25 @@ movie_only_filters = [
"audio_language", "audio_language.not",
"country", "country.not",
"director", "director.not",
"original_language", "original_language.not",
"original_language", "original_language.not",
"subtitle_language", "subtitle_language.not",
"video_resolution", "video_resolution.not",
"writer", "writer.not"
]
boolean_details = [
"add_to_arr",
"show_filtered",
"show_missing",
"save_missing"
]
all_details = [
"sort_title", "content_rating",
"summary", "tmdb_summary", "tmdb_description", "tmdb_biography",
"collection_mode", "collection_order",
"url_poster", "tmdb_poster", "tmdb_profile", "file_poster",
"url_background", "file_background",
"name_mapping", "add_to_arr"
"name_mapping", "add_to_arr", "arr_tag", "label",
"show_filtered", "show_missing", "save_missing"
]
discover_movie = [
"language", "with_original_language", "region", "sort_by",
@@ -475,10 +489,12 @@ def choose_from_list(datalist, description, data=None, list_type="title", exact=
else:
return None
def get_list(data):
def get_list(data, lower=False, split=True):
if isinstance(data, list): return data
elif isinstance(data, dict): return [data]
else: return str(data).split(", ")
elif split is False: return [str(data)]
elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
else: return [d.strip() for d in str(data).split(",")]
def get_int_list(data, id_type):
values = get_list(data)
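
For reference, the reworked get_list above now strips whitespace around each comma-separated item and gains lower/split switches; a few illustrative calls (inputs invented for the example):

get_list("Action, Comedy , Drama")                  # ['Action', 'Comedy', 'Drama']
get_list("Action, Comedy", lower=True)              # ['action', 'comedy']
get_list("Kill Bill: Vol. 1, Part 2", split=False)  # ['Kill Bill: Vol. 1, Part 2']
get_list(["already", "a", "list"])                  # returned unchanged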
@@ -570,7 +586,7 @@ def windows_input(prompt, timeout=5):
def print_multiline(lines, info=False, warning=False, error=False, critical=False):
for i, line in enumerate(lines.split("\n")):
for i, line in enumerate(str(lines).split("\n")):
if critical: logger.critical(line)
elif error: logger.error(line)
elif warning: logger.warning(line)

View file

@@ -3,10 +3,13 @@ from modules import tests, util
from modules.config import Config
parser = argparse.ArgumentParser()
parser.add_argument("--test", dest="test", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("--mytests", dest="tests", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False)
parser.add_argument("-cl", "--collection", "--collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str, default="")
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
args = parser.parse_args()
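
With the new flags, -rt/--run-tests triggers an immediate run in debug logging that only processes collections marked test: true, and -cl/--collections triggers an immediate run limited to a comma-separated list of collection names; both paths hit the "if args.run or args.test or args.collections" branch further down. Two hypothetical invocations (config path and collection names invented for the example):

python plex_meta_manager.py --config config/config.yml --run-tests
python plex_meta_manager.py --config config/config.yml -cl "Marvel Cinematic Universe, Pixar"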
@@ -41,7 +44,7 @@ file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(lev
cmd_handler = logging.StreamHandler()
cmd_handler.setFormatter(logging.Formatter("| %(message)-100s |"))
cmd_handler.setLevel(logging.INFO)
cmd_handler.setLevel(logging.DEBUG if args.tests or args.test or args.debug else logging.INFO)
logger.addHandler(cmd_handler)
logger.addHandler(file_handler)
@@ -56,30 +59,34 @@ logger.info(util.get_centered_text("| |_) | |/ _ \ \/ / | |\/| |/ _ \ __/ _` | |
logger.info(util.get_centered_text("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
logger.info(util.get_centered_text("|_| |_|\___/_/\_\ |_| |_|\___|\__\__,_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|_| "))
logger.info(util.get_centered_text(" |___/ "))
logger.info(util.get_centered_text(" Version: 1.1.0 "))
logger.info(util.get_centered_text(" Version: 1.2.0 "))
util.seperator()
if args.test:
if args.tests:
tests.run_tests(default_dir)
sys.exit(0)
def start(config_path):
def start(config_path, test, daily, collections):
if daily: type = "Daily "
elif test: type = "Test "
elif collections: type = "Collections "
else: type = ""
util.seperator("Starting {}Run".format(type))
try:
util.seperator("Starting Daily Run")
config = Config(default_dir, config_path)
config.update_libraries()
config.update_libraries(test, collections)
except Exception as e:
util.print_stacktrace()
logger.critical(e)
logger.info("")
util.seperator("Finished Daily Run")
util.seperator("Finished {}Run".format(type))
try:
if args.run:
start(args.config)
if args.run or args.test or args.collections:
start(args.config, args.test, False, args.collections)
else:
length = 0
schedule.every().day.at(args.time).do(start, args.config)
schedule.every().day.at(args.time).do(start, args.config, False, True, None)
while True:
schedule.run_pending()
current = datetime.datetime.now().strftime("%H:%M")