added mass_genre_update

meisnate12 2021-03-05 11:04:28 -05:00
parent 8ea4ab0950
commit acc7d86561
5 changed files with 276 additions and 49 deletions

@@ -878,17 +878,17 @@ class CollectionBuilder:
elif "tmdb_show_details" in self.backgrounds: set_image("tmdb_show", self.backgrounds, is_background=True)
else: logger.info("No background to update")
def run_collections_again(self, library, collection_obj, movie_map, show_map):
def run_collections_again(self, collection_obj, movie_map, show_map):
collection_items = collection_obj.items() if isinstance(collection_obj, Collections) else []
name = collection_obj.title if isinstance(collection_obj, Collections) else collection_obj
rating_keys = [movie_map[mm] for mm in self.missing_movies if mm in movie_map]
if library.is_show:
if self.library.is_show:
rating_keys.extend([show_map[sm] for sm in self.missing_shows if sm in show_map])
if len(rating_keys) > 0:
for rating_key in rating_keys:
try:
current = library.fetchItem(int(rating_key))
current = self.library.fetchItem(int(rating_key))
except (BadRequest, NotFound):
logger.error(f"Plex Error: Item {rating_key} not found")
continue
@@ -897,7 +897,7 @@ class CollectionBuilder:
else:
current.addCollection(name)
logger.info(f"{name} Collection | + | {current.title}")
logger.info(f"{len(rating_keys)} {'Movie' if library.is_movie else 'Show'}{'s' if len(rating_keys) > 1 else ''} Processed")
logger.info(f"{len(rating_keys)} {'Movie' if self.library.is_movie else 'Show'}{'s' if len(rating_keys) > 1 else ''} Processed")
if len(self.missing_movies) > 0:
logger.info("")
@@ -913,7 +913,7 @@ class CollectionBuilder:
logger.info("")
logger.info(f"{len(self.missing_movies)} Movie{'s' if len(self.missing_movies) > 1 else ''} Missing")
if len(self.missing_shows) > 0 and library.is_show:
if len(self.missing_shows) > 0 and self.library.is_show:
logger.info("")
for missing_id in self.missing_shows:
if missing_id not in show_map:

@@ -1,6 +1,7 @@
import logging, os, random, sqlite3
from contextlib import closing
from datetime import datetime, timedelta
from modules.util import Failed
logger = logging.getLogger("Plex Meta Manager")
@@ -13,30 +14,45 @@ class Cache:
cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guids'")
if cursor.fetchone()[0] == 0:
logger.info(f"Initializing cache database at {cache}")
cursor.execute(
"""CREATE TABLE IF NOT EXISTS guids (
INTEGER PRIMARY KEY,
plex_guid TEXT,
tmdb_id TEXT,
imdb_id TEXT,
tvdb_id TEXT,
anidb_id TEXT,
mal_id TEXT,
expiration_date TEXT,
media_type TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS imdb_map (
INTEGER PRIMARY KEY,
imdb_id TEXT,
t_id TEXT,
expiration_date TEXT,
media_type TEXT)"""
)
else:
logger.info(f"Using cache database at {cache}")
cursor.execute(
"""CREATE TABLE IF NOT EXISTS guids (
INTEGER PRIMARY KEY,
plex_guid TEXT UNIQUE,
tmdb_id TEXT,
imdb_id TEXT,
tvdb_id TEXT,
anidb_id TEXT,
mal_id TEXT,
expiration_date TEXT,
media_type TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS imdb_map (
INTEGER PRIMARY KEY,
imdb_id TEXT UNIQUE,
t_id TEXT,
expiration_date TEXT,
media_type TEXT)"""
)
cursor.execute(
"""CREATE TABLE IF NOT EXISTS omdb_data (
INTEGER PRIMARY KEY,
imdb_id TEXT UNIQUE,
title TEXT,
year INTEGER,
content_rating TEXT,
genres TEXT,
imdb_rating REAL,
imdb_votes INTEGER,
metacritic_rating INTEGER,
type TEXT,
expiration_date TEXT)"""
)
self.expiration = expiration
self.cache_path = cache
self.omdb_expiration = expiration
def get_ids_from_imdb(self, imdb_id):
tmdb_id, tmdb_expired = self.get_tmdb_id("movie", imdb_id=imdb_id)
@@ -82,6 +98,40 @@ class Cache:
expired = time_between_insertion.days > self.expiration
return id_to_return, expired
def get_ids(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None):
ids_to_return = {}
expired = None
if plex_guid:
key = plex_guid
key_type = "plex_guid"
elif tmdb_id:
key = tmdb_id
key_type = "tmdb_id"
elif imdb_id:
key = imdb_id
key_type = "imdb_id"
elif tvdb_id:
key = tvdb_id
key_type = "tvdb_id"
else:
raise Failed("ID Required")
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute(f"SELECT * FROM guids WHERE {key_type} = ? AND media_type = ?", (key, media_type))
row = cursor.fetchone()
if row:
if row["plex_guid"]: ids_to_return["plex"] = row["plex_guid"]
if row["tmdb_id"]: ids_to_return["tmdb"] = int(row["tmdb_id"])
if row["imdb_id"]: ids_to_return["imdb"] = row["imdb_id"]
if row["tvdb_id"]: ids_to_return["tvdb"] = int(row["tvdb_id"])
if row["anidb_id"]: ids_to_return["anidb"] = int(row["anidb_id"])
if row["mal_id"]: ids_to_return["mal"] = int(row["mal_id"])
datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
time_between_insertion = datetime.now() - datetime_object
expired = time_between_insertion.days > self.expiration
return ids_to_return, expired
def update_guid(self, media_type, plex_guid, tmdb_id, imdb_id, tvdb_id, anidb_id, mal_id, expired):
expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
with sqlite3.connect(self.cache_path) as connection:
@@ -126,3 +176,35 @@ class Cache:
with closing(connection.cursor()) as cursor:
cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,))
cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? WHERE imdb_id = ?", (t_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id))
def query_omdb(self, imdb_id):
omdb_dict = {}
expired = None
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT * FROM omdb_data WHERE imdb_id = ?", (imdb_id,))
row = cursor.fetchone()
if row:
omdb_dict["imdbID"] = row["imdb_id"] if row["imdb_id"] else None
omdb_dict["Title"] = row["title"] if row["title"] else None
omdb_dict["Year"] = row["year"] if row["year"] else None
omdb_dict["Rated"] = row["content_rating"] if row["content_rating"] else None
omdb_dict["Genre"] = row["genres"] if row["genres"] else None
omdb_dict["imdbRating"] = row["imdb_rating"] if row["imdb_rating"] else None
omdb_dict["imdbVotes"] = row["imdb_votes"] if row["imdb_votes"] else None
omdb_dict["Metascore"] = row["metacritic_rating"] if row["metacritic_rating"] else None
omdb_dict["Type"] = row["type"] if row["type"] else None
datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
time_between_insertion = datetime.now() - datetime_object
expired = time_between_insertion.days > self.omdb_expiration
return omdb_dict, expired
def update_omdb(self, expired, omdb):
expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.omdb_expiration)))
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.execute("INSERT OR IGNORE INTO omdb_data(imdb_id) VALUES(?)", (omdb.imdb_id,))
update_sql = "UPDATE omdb_data SET title = ?, year = ?, content_rating = ?, genres = ?, imdb_rating = ?, imdb_votes = ?, metacritic_rating = ?, type = ?, expiration_date = ? WHERE imdb_id = ?"
cursor.execute(update_sql, (omdb.title, omdb.year, omdb.content_rating, omdb.genres_str, omdb.imdb_rating, omdb.imdb_votes, omdb.metacritic_rating, omdb.type, expiration_date.strftime("%Y-%m-%d"), omdb.imdb_id))
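Taken together, the new get_ids, query_omdb, and update_omdb helpers give callers a single keyed lookup for cross-service IDs plus a local cache for OMDb responses. A minimal usage sketch, assuming an already-constructed Cache instance named cache; the method names and return shapes come from the code above, everything else is illustrative:

# Illustrative use of the new Cache helpers; `cache` is assumed to be a Cache instance.
ids, expired = cache.get_ids("movie", imdb_id="tt0080684")
if ids and not expired:
    tmdb_id = ids.get("tmdb")   # ints where the cached row had a value, per the parsing above
    tvdb_id = ids.get("tvdb")

omdb_dict, omdb_expired = cache.query_omdb("tt0080684")
if not omdb_dict or omdb_expired:
    # fetch fresh data from OMDb here, then persist it with
    # cache.update_omdb(omdb_expired, omdb_obj), where omdb_obj is an OMDbObj-like object
    pass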

@@ -7,6 +7,7 @@ from modules.imdb import IMDbAPI
from modules.letterboxd import LetterboxdAPI
from modules.mal import MyAnimeListAPI
from modules.mal import MyAnimeListIDList
from modules.omdb import OMDbAPI
from modules.plex import PlexAPI
from modules.radarr import RadarrAPI
from modules.sonarr import SonarrAPI
@@ -71,6 +72,7 @@ class Config:
if "tautulli" in new_config: new_config["tautulli"] = new_config.pop("tautulli")
if "radarr" in new_config: new_config["radarr"] = new_config.pop("radarr")
if "sonarr" in new_config: new_config["sonarr"] = new_config.pop("sonarr")
if "omdb" in new_config: new_config["omdb"] = new_config.pop("omdb")
if "trakt" in new_config: new_config["trakt"] = new_config.pop("trakt")
if "mal" in new_config: new_config["mal"] = new_config.pop("mal")
yaml.round_trip_dump(new_config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
@@ -172,6 +174,23 @@ class Config:
util.separator()
self.OMDb = None
if "omdb" in self.data:
logger.info("Connecting to OMDb...")
self.omdb = {}
try:
self.omdb["apikey"] = check_for_attribute(self.data, "apikey", parent="omdb", throw=True)
self.omdb["omdb_cache"] = check_for_attribute(self.data, "omdb_cache", parent="omdb", options=" true (Use a cache to store data)\n false (Do not use a cache to store data)", var_type="bool", default=True)
self.omdb["omdb_cache_expiration"] = check_for_attribute(self.data, "omdb_cache_expiration", parent="omdb", var_type="int", default=60)
self.OMDb = OMDbAPI(self.omdb, Cache=self.Cache)
except Failed as e:
logger.error(e)
logger.info(f"OMDb Connection {'Failed' if self.OMDb is None else 'Successful'}")
else:
logger.warning("omdb attribute not found")
util.separator()
self.Trakt = None
if "trakt" in self.data:
logger.info("Connecting to Trakt...")
@@ -263,12 +282,35 @@ class Config:
if params["asset_directory"] is None:
logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library")
params["sync_mode"] = check_for_attribute(libs[lib], "sync_mode", parent="settings", test_list=["append", "sync"], options=" append (Only Add Items to the Collection)\n sync (Add & Remove Items from the Collection)", default=self.general["sync_mode"], save=False)
params["show_unmanaged"] = check_for_attribute(libs[lib], "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], save=False)
params["show_filtered"] = check_for_attribute(libs[lib], "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], save=False)
params["show_missing"] = check_for_attribute(libs[lib], "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], save=False)
params["save_missing"] = check_for_attribute(libs[lib], "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], save=False)
if "settings" in libs[lib] and libs[lib]["settings"] and "sync_mode" in libs[lib]["settings"]:
params["sync_mode"] = check_for_attribute(libs[lib], "sync_mode", parent="settings", test_list=["append", "sync"], options=" append (Only Add Items to the Collection)\n sync (Add & Remove Items from the Collection)", default=self.general["sync_mode"], do_print=False, save=False)
else:
params["sync_mode"] = check_for_attribute(libs[lib], "sync_mode", test_list=["append", "sync"], options=" append (Only Add Items to the Collection)\n sync (Add & Remove Items from the Collection)", default=self.general["sync_mode"], do_print=False, save=False)
if "settings" in libs[lib] and libs[lib]["settings"] and "show_unmanaged" in libs[lib]["settings"]:
params["show_unmanaged"] = check_for_attribute(libs[lib], "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
else:
params["show_unmanaged"] = check_for_attribute(libs[lib], "show_unmanaged", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
if "settings" in libs[lib] and libs[lib]["settings"] and "show_filtered" in libs[lib]["settings"]:
params["show_filtered"] = check_for_attribute(libs[lib], "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
else:
params["show_filtered"] = check_for_attribute(libs[lib], "show_filtered", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
if "settings" in libs[lib] and libs[lib]["settings"] and "show_missing" in libs[lib]["settings"]:
params["show_missing"] = check_for_attribute(libs[lib], "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
else:
params["show_missing"] = check_for_attribute(libs[lib], "show_missing", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
if "settings" in libs[lib] and libs[lib]["settings"] and "save_missing" in libs[lib]["settings"]:
params["save_missing"] = check_for_attribute(libs[lib], "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
else:
params["save_missing"] = check_for_attribute(libs[lib], "save_missing", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
if "mass_genre_update" in libs[lib] and libs[lib]["mass_genre_update"]:
params["mass_genre_update"] = check_for_attribute(libs[lib], "mass_genre_update", test_list=["tmdb", "omdb"], options=" tmdb (Use TMDb Metadata)\n omdb (Use IMDb Metadata through OMDb)", default_is_none=True, save=False)
else:
params["mass_genre_update"] = None
try:
params["metadata_path"] = check_for_attribute(libs[lib], "metadata_path", var_type="path", default=os.path.join(default_dir, f"{lib}.yml"), throw=True)
params["library_type"] = check_for_attribute(libs[lib], "library_type", test_list=["movie", "show"], options=" movie (For Movie Libraries)\n show (For Show Libraries)", throw=True)
@@ -295,7 +337,7 @@ class Config:
radarr_params["add"] = check_for_attribute(libs[lib], "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False)
radarr_params["search"] = check_for_attribute(libs[lib], "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
radarr_params["tag"] = check_for_attribute(libs[lib], "search", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False)
library.add_Radarr(RadarrAPI(self.TMDb, radarr_params))
library.Radarr = RadarrAPI(self.TMDb, radarr_params)
except Failed as e:
util.print_multiline(e)
logger.info(f"{params['name']} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
@@ -313,7 +355,7 @@ class Config:
sonarr_params["search"] = check_for_attribute(libs[lib], "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False)
sonarr_params["season_folder"] = check_for_attribute(libs[lib], "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False)
sonarr_params["tag"] = check_for_attribute(libs[lib], "search", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False)
library.add_Sonarr(SonarrAPI(self.TVDb, sonarr_params, library.Plex.language))
library.Sonarr = SonarrAPI(self.TVDb, sonarr_params, library.Plex.language)
except Failed as e:
util.print_multiline(e)
logger.info(f"{params['name']} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
@@ -324,7 +366,7 @@ class Config:
try:
tautulli_params["url"] = check_for_attribute(libs[lib], "url", parent="tautulli", default=self.general["tautulli"]["url"], req_default=True, save=False)
tautulli_params["apikey"] = check_for_attribute(libs[lib], "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
library.add_Tautulli(TautulliAPI(tautulli_params))
library.Tautulli = TautulliAPI(tautulli_params)
except Failed as e:
util.print_multiline(e)
logger.info(f"{params['name']} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
@@ -345,16 +387,19 @@ class Config:
os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
logger.info("")
util.separator(f"{library.name} Library")
try: library.update_metadata(self.TMDb, test)
except Failed as e: logger.error(e)
logger.info("")
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = self.map_guids(library)
if not test:
if library.mass_genre_update:
self.mass_metadata(library)
try: library.update_metadata(self.TMDb, test)
except Failed as e: logger.error(e)
logger.info("")
util.separator(f"{library.name} Library {'Test ' if test else ''}Collections")
collections = {c: library.collections[c] for c in util.get_list(requested_collections) if c in library.collections} if requested_collections else library.collections
if collections:
logger.info("")
util.separator(f"Mapping {library.name} Library")
logger.info("")
movie_map, show_map = self.map_guids(library)
for c in collections:
if test and ("test" not in collections[c] or collections[c]["test"] is not True):
no_template_test = True
@@ -478,7 +523,7 @@ class Config:
except Failed as e:
util.print_multiline(e, error=True)
continue
builder.run_collections_again(library, collection_obj, movie_map, show_map)
builder.run_collections_again(collection_obj, movie_map, show_map)
def convert_from_imdb(self, imdb_id, language):
update_tmdb = False
@@ -534,6 +579,51 @@ class Config:
self.Cache.update_imdb("show", update_tvdb, imdb_id, tvdb_id)
return tmdb_id, tvdb_id
def mass_metadata(self, library):
length = 0
logger.info("")
util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")
items = library.Plex.all()
for i, item in enumerate(items, 1):
length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
ids, expired = self.Cache.get_ids("movie" if library.is_movie else "show", plex_guid=item.guid)
if library.mass_genre_update:
if library.mass_genre_update == "tmdb":
if "tmdb" not in ids:
util.print_end(length, f"{item.title[:25]:<25} | No TMDb for Guid: {item.guid}")
continue
try:
tmdb_item = self.TMDb.get_movie(ids["tmdb"]) if library.is_movie else self.TMDb.get_show(ids["tmdb"])
except Failed as e:
util.print_end(length, str(e))
continue
new_genres = [genre.name for genre in tmdb_item.genres]
elif library.mass_genre_update == "omdb":
if self.OMDb.limit is True:
break
if "imdb" not in ids:
util.print_end(length, f"{item.title[:25]:<25} | No IMDb for Guid: {item.guid}")
continue
try:
omdb_item = self.OMDb.get_omdb(ids["imdb"])
except Failed as e:
util.print_end(length, str(e))
continue
new_genres = omdb_item.genres
else:
raise Failed
item_genres = [genre.tag for genre in item.genres]
display_str = ""
for genre in (g for g in item_genres if g not in new_genres):
item.removeGenre(genre)
display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}"
for genre in (g for g in new_genres if g not in item_genres):
item.addGenre(genre)
display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
if len(display_str) > 0:
util.print_end(length, f"{item.title[:25]:<25} | Genres | {display_str}")
def map_guids(self, library):
movie_map = {}
show_map = {}
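The genre reconciliation at the end of mass_metadata above is a two-way difference between what Plex currently has and what the chosen source returns. The same logic written against plain lists, so it can be sanity-checked without a Plex connection (the function below is illustrative only; plexapi's item.removeGenre and item.addGenre calls do the real work above):

# Illustrative only: the genre diff from mass_metadata, without the Plex item.
def genre_changes(current_genres, new_genres):
    to_remove = [g for g in current_genres if g not in new_genres]
    to_add = [g for g in new_genres if g not in current_genres]
    return to_remove, to_add

# genre_changes(["Action", "Drama"], ["Drama", "Thriller"]) -> (["Action"], ["Thriller"])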

modules/omdb.py (new file, 63 lines added)

@@ -0,0 +1,63 @@
import logging, math, re, requests
from lxml import html
from modules import util
from modules.util import Failed
from retrying import retry
logger = logging.getLogger("Plex Meta Manager")
class OMDbObj:
def __init__(self, data):
self._data = data
self.title = data["Title"]
try:
self.year = int(data["Year"])
except (ValueError, TypeError):
self.year = None
self.content_rating = data["Rated"]
self.genres = util.get_list(data["Genre"])
self.genres_str = data["Genre"]
try:
self.imdb_rating = float(data["imdbRating"])
except (ValueError, TypeError):
self.imdb_rating = None
try:
self.imdb_votes = int(str(data["imdbVotes"]).replace(',', ''))
except (ValueError, TypeError):
self.imdb_votes = None
try:
self.metacritic_rating = int(data["Metascore"])
except (ValueError, TypeError):
self.metacritic_rating = None
self.imdb_id = data["imdbID"]
self.type = data["Type"]
class OMDbAPI:
def __init__(self, params, Cache=None):
self.url = "http://www.omdbapi.com/"
self.apikey = params["apikey"]
self.cache = params["omdb_cache"]
self.cache_expiration = params["omdb_cache_expiration"]
self.limit = False
Cache.omdb_expiration = self.cache_expiration
self.Cache = Cache
self.get_omdb("tt0080684")
#@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
def get_omdb(self, imdb_id):
expired = None
if self.cache and self.Cache:
omdb_dict, expired = self.Cache.query_omdb(imdb_id)
if omdb_dict and expired is False:
return OMDbObj(omdb_dict)
response = requests.get(self.url, params={"i": imdb_id, "apikey": self.apikey})
if response.status_code < 400:
omdb = OMDbObj(response.json())
if self.cache and self.Cache:
self.Cache.update_omdb(expired, omdb)
return omdb
else:
error = response.json()['Error']
if error == "Request limit reached!":
self.limit = True
raise Failed(f"OMDb Error: {error}")

@@ -60,20 +60,12 @@ class PlexAPI:
self.show_filtered = params["show_filtered"]
self.show_missing = params["show_missing"]
self.save_missing = params["save_missing"]
self.mass_genre_update = params["mass_genre_update"]
self.plex = params["plex"]
self.timeout = params["plex"]["timeout"]
self.missing = {}
self.run_again = []
def add_Radarr(self, Radarr):
self.Radarr = Radarr
def add_Sonarr(self, Sonarr):
self.Sonarr = Sonarr
def add_Tautulli(self, Tautulli):
self.Tautulli = Tautulli
@retry(stop_max_attempt_number=6, wait_fixed=10000)
def search(self, title, libtype=None, year=None):
if libtype is not None and year is not None: return self.Plex.search(title=title, year=year, libtype=libtype)