mirror of
https://github.com/meisnate12/Plex-Meta-Manager
synced 2024-11-22 04:23:08 +00:00
Initial Commit
This commit is contained in:
parent
277cd010c8
commit
23d5914682
22 changed files with 5626 additions and 2 deletions
10
.dockerignore
Normal file
10
.dockerignore
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
**/dist
|
||||||
|
**/build
|
||||||
|
*.spec
|
||||||
|
**/__pycache__
|
||||||
|
/.vscode
|
||||||
|
**/log
|
||||||
|
README.md
|
||||||
|
LICENSE
|
||||||
|
.gitignore
|
||||||
|
.git
|
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -8,6 +8,10 @@ __pycache__/
|
||||||
|
|
||||||
# Distribution / packaging
|
# Distribution / packaging
|
||||||
.Python
|
.Python
|
||||||
|
/modules/test.py
|
||||||
|
logs/
|
||||||
|
config/*
|
||||||
|
!config/*.template
|
||||||
build/
|
build/
|
||||||
develop-eggs/
|
develop-eggs/
|
||||||
dist/
|
dist/
|
||||||
|
|
21
Dockerfile
Normal file
21
Dockerfile
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
FROM python:3-slim
|
||||||
|
VOLUME /config
|
||||||
|
COPY . /
|
||||||
|
RUN \
|
||||||
|
echo "**** install system packages ****" && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get upgrade -y --no-install-recommends && \
|
||||||
|
apt-get install -y tzdata --no-install-recommends && \
|
||||||
|
echo "**** install python packages ****" && \
|
||||||
|
pip3 install --no-cache-dir --upgrade --requirement /requirements.txt && \
|
||||||
|
echo "**** install Plex-Auto-Collections ****" && \
|
||||||
|
chmod +x /plex_meta_manager.py && \
|
||||||
|
echo "**** cleanup ****" && \
|
||||||
|
apt-get autoremove -y && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf \
|
||||||
|
/requirements.txt \
|
||||||
|
/tmp/* \
|
||||||
|
/var/tmp/* \
|
||||||
|
/var/lib/apt/lists/*
|
||||||
|
ENTRYPOINT ["python3", "plex_meta_manager.py"]
|
22
README.md
22
README.md
|
@ -1,2 +1,20 @@
|
||||||
# Plex-Meta-Manager
|
# Plex Meta Manager
|
||||||
Python script to update metadata information for movies, shows, and collections
|
|
||||||
|
The original concept for Plex Meta Manager is [Plex Auto Collections](https://github.com/mza921/Plex-Auto-Collections), but this is rewritten from the ground up to be able to include a scheduler, metadata edits, multiple libraries, and logging. Plex Meta Manager is a Python 3 script that can be continuously run using YMAL configuration files to update on a schedule the metadata of the movies, shows, and collections in your libraries as well as automatically build collections based on various methods all detailed in the wiki. Some collection examples that the script can automatically build and update daily include Plex Based Searches like actor, genre, or studio collections or Collections based on TMDb, IMDb, Trakt, TVDb, AniDB, or MyAnimeList lists and various other services.
|
||||||
|
|
||||||
|
The script can update many metadata fields for movies, shows, collections, seasons, and episodes and can act as a backup if your plex DB goes down. It can even update metadata the plex UI can't like Season Names. If the time is put into the metadata configuration file you can have a way to recreate your library and all its metadata changes with the click of a button.
|
||||||
|
|
||||||
|
The script is designed to work with most Metadata agents including the new Plex Movie Agent, [Hama Anime Agent](https://github.com/ZeroQI/Hama.bundle), and [MyAnimeList Anime Agent](https://github.com/Fribb/MyAnimeList.bundle).
|
||||||
|
|
||||||
|
## Getting Started
|
||||||
|
|
||||||
|
* [Wiki](https://github.com/meisnate12/Plex-Meta-Manager/wiki)
|
||||||
|
* [Local Installation](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Local-Installation)
|
||||||
|
* [Docker Installation](https://github.com/meisnate12/Plex-Meta-Manager/wiki/Docker)
|
||||||
|
|
||||||
|
## Support
|
||||||
|
|
||||||
|
* If you're getting an Error or have an Enhancement post in the [Issues](https://github.com/meisnate12/Plex-Meta-Manager/issues)
|
||||||
|
* If you have a configuration question or want to see some example and user shared configurations visit the [Discussions](https://github.com/meisnate12/Plex-Meta-Manager/discussions)
|
||||||
|
* Pull Request are welcome
|
||||||
|
* [Buy Me a Pizza](https://www.buymeacoffee.com/meisnate12)
|
||||||
|
|
1418
config/Movies.yml.template
Normal file
1418
config/Movies.yml.template
Normal file
File diff suppressed because it is too large
Load diff
57
config/config.yml.template
Normal file
57
config/config.yml.template
Normal file
|
@ -0,0 +1,57 @@
|
||||||
|
libraries:
|
||||||
|
Movies:
|
||||||
|
library_type: movie
|
||||||
|
TV Shows:
|
||||||
|
library_type: show
|
||||||
|
Anime:
|
||||||
|
library_type: show
|
||||||
|
cache:
|
||||||
|
cache: true
|
||||||
|
cache_expiration: 60
|
||||||
|
plex: # Can be individually specified per library as well
|
||||||
|
url: http://192.168.1.12:32400
|
||||||
|
token: ####################
|
||||||
|
sync_mode: append
|
||||||
|
asset_directory: config/assets
|
||||||
|
radarr: # Can be individually specified per library as well
|
||||||
|
url: http://192.168.1.12:7878
|
||||||
|
token: ################################
|
||||||
|
version: v2
|
||||||
|
quality_profile: HD-1080p
|
||||||
|
root_folder_path: S:/Movies
|
||||||
|
add: false
|
||||||
|
search: false
|
||||||
|
sonarr: # Can be individually specified per library as well
|
||||||
|
url: http://192.168.1.12:8989
|
||||||
|
token: ################################
|
||||||
|
version: v2
|
||||||
|
quality_profile: HD-1080p
|
||||||
|
root_folder_path: "S:/TV Shows"
|
||||||
|
add: false
|
||||||
|
search: false
|
||||||
|
tautulli: # Can be individually specified per library as well
|
||||||
|
url: http://192.168.1.12:8181
|
||||||
|
apikey: ################################
|
||||||
|
tmdb:
|
||||||
|
apikey: ################################
|
||||||
|
language: en
|
||||||
|
trakt:
|
||||||
|
client_id: ################################################################
|
||||||
|
client_secret: ################################################################
|
||||||
|
authorization:
|
||||||
|
# everything below is autofilled by the script
|
||||||
|
access_token:
|
||||||
|
token_type:
|
||||||
|
expires_in:
|
||||||
|
refresh_token:
|
||||||
|
scope: public
|
||||||
|
created_at:
|
||||||
|
mal:
|
||||||
|
client_id: ################################
|
||||||
|
client_secret: ################################################################
|
||||||
|
authorization:
|
||||||
|
# everything below is autofilled by the script
|
||||||
|
access_token:
|
||||||
|
token_type:
|
||||||
|
expires_in:
|
||||||
|
refresh_token:
|
116
modules/anidb.py
Normal file
116
modules/anidb.py
Normal file
|
@ -0,0 +1,116 @@
|
||||||
|
import logging, requests
|
||||||
|
from lxml import html
|
||||||
|
from modules import util
|
||||||
|
from modules.util import Failed
|
||||||
|
from retrying import retry
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class AniDBAPI:
|
||||||
|
def __init__(self, Cache=None, TMDb=None, Trakt=None):
|
||||||
|
self.Cache = Cache
|
||||||
|
self.TMDb = TMDb
|
||||||
|
self.Trakt = Trakt
|
||||||
|
self.urls = {
|
||||||
|
"anime": "https://anidb.net/anime",
|
||||||
|
"popular": "https://anidb.net/latest/anime/popular/?h=1",
|
||||||
|
"relation": "/relation/graph"
|
||||||
|
}
|
||||||
|
self.id_list = html.fromstring(requests.get("https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml").content)
|
||||||
|
|
||||||
|
def convert_anidb_to_tvdb(self, anidb_id): return self.convert_anidb(anidb_id, "anidbid", "tvdbid")
|
||||||
|
def convert_anidb_to_imdb(self, anidb_id): return self.convert_anidb(anidb_id, "anidbid", "imdbid")
|
||||||
|
def convert_tvdb_to_anidb(self, tvdb_id): return self.convert_anidb(tvdb_id, "tvdbid", "anidbid")
|
||||||
|
def convert_imdb_to_anidb(self, imdb_id): return self.convert_anidb(imdb_id, "imdbid", "anidbid")
|
||||||
|
def convert_anidb(self, input_id, from_id, to_id):
|
||||||
|
ids = self.id_list.xpath("//anime[@{}='{}']/@{}".format(from_id, input_id, to_id))
|
||||||
|
if len(ids) > 0:
|
||||||
|
if len(ids[0]) > 0: return ids[0] if to_id == "imdbid" else int(ids[0])
|
||||||
|
else: raise Failed("AniDB Error: No {} ID found for {} ID: {}".format(util.pretty_ids[to_id], util.pretty_ids[from_id], input_id))
|
||||||
|
else: raise Failed("AniDB Error: {} ID: {} not found".format(util.pretty_ids[from_id], input_id))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def send_request(self, url, language):
|
||||||
|
return requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content
|
||||||
|
|
||||||
|
def get_popular(self, language):
|
||||||
|
response = html.fromstring(self.send_request(self.urls["popular"], language))
|
||||||
|
return util.get_int_list(response.xpath("//td[@class='name anime']/a/@href"), "AniDB ID")
|
||||||
|
|
||||||
|
def validate_anidb_id(self, anidb_id, language):
|
||||||
|
response = html.fromstring(self.send_request("{}/{}".format(self.urls["anime"], anidb_id), language))
|
||||||
|
ids = response.xpath("//*[text()='a{}']/text()".format(anidb_id))
|
||||||
|
if len(ids) > 0:
|
||||||
|
return util.regex_first_int(ids[0], "AniDB ID")
|
||||||
|
raise Failed("AniDB Error: AniDB ID: {} not found".format(anidb_id))
|
||||||
|
|
||||||
|
def get_anidb_relations(self, anidb_id, language):
|
||||||
|
response = html.fromstring(self.send_request("{}/{}{}".format(self.urls["anime"], anidb_id, self.urls["relation"]), language))
|
||||||
|
return util.get_int_list(response.xpath("//area/@href"), "AniDB ID")
|
||||||
|
|
||||||
|
def validate_anidb_list(self, anidb_list, language):
|
||||||
|
anidb_values = []
|
||||||
|
for anidb_id in anidb_list:
|
||||||
|
try:
|
||||||
|
anidb_values.append(self.validate_anidb_id(anidb_id, language))
|
||||||
|
except Failed as e:
|
||||||
|
logger.error(e)
|
||||||
|
if len(anidb_values) > 0:
|
||||||
|
return anidb_values
|
||||||
|
raise Failed("AniDB Error: No valid AniDB IDs in {}".format(anidb_list))
|
||||||
|
|
||||||
|
def get_items(self, method, data, language, status_message=True):
|
||||||
|
pretty = util.pretty_names[method] if method in util.pretty_names else method
|
||||||
|
if status_message:
|
||||||
|
logger.debug("Data: {}".format(data))
|
||||||
|
anime_ids = []
|
||||||
|
if method == "anidb_popular":
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} Anime".format(pretty, data))
|
||||||
|
anime_ids.extend(self.get_popular(language)[:data])
|
||||||
|
else:
|
||||||
|
if status_message: logger.info("Processing {}: {}".format(pretty, data))
|
||||||
|
if method == "anidb_id": anime_ids.append(data)
|
||||||
|
elif method == "anidb_relation": anime_ids.extend(self.get_anidb_relations(data, language))
|
||||||
|
else: raise Failed("AniDB Error: Method {} not supported".format(method))
|
||||||
|
show_ids = []
|
||||||
|
movie_ids = []
|
||||||
|
for anidb_id in anime_ids:
|
||||||
|
try:
|
||||||
|
tmdb_id, tvdb_id = self.convert_from_imdb(self.convert_anidb_to_imdb(anidb_id), language)
|
||||||
|
if tmdb_id: movie_ids.append(tmdb_id)
|
||||||
|
else: raise Failed
|
||||||
|
except Failed:
|
||||||
|
try: show_ids.append(self.convert_anidb_to_tvdb(anidb_id))
|
||||||
|
except Failed: logger.error("AniDB Error: No TVDb ID or IMDb ID found for AniDB ID: {}".format(anidb_id))
|
||||||
|
if status_message:
|
||||||
|
logger.debug("AniDB IDs Found: {}".format(anime_ids))
|
||||||
|
logger.debug("TMDb IDs Found: {}".format(movie_ids))
|
||||||
|
logger.debug("TVDb IDs Found: {}".format(show_ids))
|
||||||
|
return movie_ids, show_ids
|
||||||
|
|
||||||
|
def convert_from_imdb(self, imdb_id, language):
|
||||||
|
if self.Cache:
|
||||||
|
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb_id)
|
||||||
|
expired = False
|
||||||
|
if not tmdb_id:
|
||||||
|
tmdb_id, expired = self.Cache.get_tmdb_from_imdb(imdb_id)
|
||||||
|
if expired:
|
||||||
|
tmdb_id = None
|
||||||
|
else:
|
||||||
|
tmdb_id = None
|
||||||
|
from_cache = tmdb_id is not None
|
||||||
|
|
||||||
|
if not tmdb_id and self.TMDb:
|
||||||
|
try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
if not tmdb_id and self.Trakt:
|
||||||
|
try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
try:
|
||||||
|
if tmdb_id and not from_cache: self.TMDb.get_movie(tmdb_id)
|
||||||
|
except Failed: tmdb_id = None
|
||||||
|
if not tmdb_id: raise Failed("TVDb Error: No TMDb ID found for IMDb: {}".format(imdb_id))
|
||||||
|
if self.Cache and tmdb_id and expired is not False:
|
||||||
|
self.Cache.update_imdb("movie", expired, imdb_id, tmdb_id)
|
||||||
|
return tmdb_id
|
128
modules/cache.py
Normal file
128
modules/cache.py
Normal file
|
@ -0,0 +1,128 @@
|
||||||
|
import logging, os, random, sqlite3
|
||||||
|
from contextlib import closing
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class Cache:
|
||||||
|
def __init__(self, config_path, expiration):
|
||||||
|
cache = "{}.cache".format(os.path.splitext(config_path)[0])
|
||||||
|
with sqlite3.connect(cache) as connection:
|
||||||
|
connection.row_factory = sqlite3.Row
|
||||||
|
with closing(connection.cursor()) as cursor:
|
||||||
|
cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guids'")
|
||||||
|
if cursor.fetchone()[0] == 0:
|
||||||
|
logger.info("Initializing cache database at {}".format(cache))
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE TABLE IF NOT EXISTS guids (
|
||||||
|
INTEGER PRIMARY KEY,
|
||||||
|
plex_guid TEXT,
|
||||||
|
tmdb_id TEXT,
|
||||||
|
imdb_id TEXT,
|
||||||
|
tvdb_id TEXT,
|
||||||
|
anidb_id TEXT,
|
||||||
|
mal_id TEXT,
|
||||||
|
expiration_date TEXT,
|
||||||
|
media_type TEXT)"""
|
||||||
|
)
|
||||||
|
cursor.execute(
|
||||||
|
"""CREATE TABLE IF NOT EXISTS imdb_map (
|
||||||
|
INTEGER PRIMARY KEY,
|
||||||
|
imdb_id TEXT,
|
||||||
|
t_id TEXT,
|
||||||
|
expiration_date TEXT,
|
||||||
|
media_type TEXT)"""
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info("Using cache database at {}".format(cache))
|
||||||
|
self.expiration = expiration
|
||||||
|
self.cache_path = cache
|
||||||
|
|
||||||
|
def get_ids_from_imdb(self, imdb_id):
|
||||||
|
tmdb_id, tmdb_expired = self.get_tmdb_id("movie", imdb_id=imdb_id)
|
||||||
|
tvdb_id, tvdb_expired = self.get_tvdb_id("show", imdb_id=imdb_id)
|
||||||
|
return tmdb_id, tvdb_id
|
||||||
|
|
||||||
|
def get_tmdb_id(self, media_type, plex_guid=None, imdb_id=None, tvdb_id=None, anidb_id=None, mal_id=None):
|
||||||
|
return self.get_id_from(media_type, "tmdb_id", plex_guid=plex_guid, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id, mal_id=mal_id)
|
||||||
|
|
||||||
|
def get_imdb_id(self, media_type, plex_guid=None, tmdb_id=None, tvdb_id=None, anidb_id=None, mal_id=None):
|
||||||
|
return self.get_id_from(media_type, "imdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id, mal_id=mal_id)
|
||||||
|
|
||||||
|
def get_tvdb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, anidb_id=None, mal_id=None):
|
||||||
|
return self.get_id_from(media_type, "tvdb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, anidb_id=anidb_id, mal_id=mal_id)
|
||||||
|
|
||||||
|
def get_anidb_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, mal_id=None):
|
||||||
|
return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id, mal_id=mal_id)
|
||||||
|
|
||||||
|
def get_mal_id(self, media_type, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None):
|
||||||
|
return self.get_id_from(media_type, "anidb_id", plex_guid=plex_guid, tmdb_id=tmdb_id, imdb_id=imdb_id, tvdb_id=tvdb_id, anidb_id=anidb_id)
|
||||||
|
|
||||||
|
def get_id_from(self, media_type, id_from, plex_guid=None, tmdb_id=None, imdb_id=None, tvdb_id=None, anidb_id=None, mal_id=None):
|
||||||
|
if plex_guid: return self.get_id(media_type, "plex_guid", id_from, plex_guid)
|
||||||
|
elif tmdb_id: return self.get_id(media_type, "tmdb_id", id_from, tmdb_id)
|
||||||
|
elif imdb_id: return self.get_id(media_type, "imdb_id", id_from, imdb_id)
|
||||||
|
elif tvdb_id: return self.get_id(media_type, "tvdb_id", id_from, tvdb_id)
|
||||||
|
elif anidb_id: return self.get_id(media_type, "anidb_id", id_from, anidb_id)
|
||||||
|
elif mal_id: return self.get_id(media_type, "mal_id", id_from, mal_id)
|
||||||
|
else: return None, None
|
||||||
|
|
||||||
|
def get_id(self, media_type, from_id, to_id, key):
|
||||||
|
id_to_return = None
|
||||||
|
expired = None
|
||||||
|
with sqlite3.connect(self.cache_path) as connection:
|
||||||
|
connection.row_factory = sqlite3.Row
|
||||||
|
with closing(connection.cursor()) as cursor:
|
||||||
|
cursor.execute("SELECT * FROM guids WHERE {} = ? AND media_type = ?".format(from_id), (key, media_type))
|
||||||
|
row = cursor.fetchone()
|
||||||
|
if row and row[to_id]:
|
||||||
|
datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
|
||||||
|
time_between_insertion = datetime.now() - datetime_object
|
||||||
|
id_to_return = int(row[to_id])
|
||||||
|
expired = time_between_insertion.days > self.expiration
|
||||||
|
return id_to_return, expired
|
||||||
|
|
||||||
|
def update_guid(self, media_type, plex_guid, tmdb_id, imdb_id, tvdb_id, anidb_id, mal_id, expired):
|
||||||
|
expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
|
||||||
|
with sqlite3.connect(self.cache_path) as connection:
|
||||||
|
connection.row_factory = sqlite3.Row
|
||||||
|
with closing(connection.cursor()) as cursor:
|
||||||
|
cursor.execute("INSERT OR IGNORE INTO guids(plex_guid) VALUES(?)", (plex_guid,))
|
||||||
|
cursor.execute(
|
||||||
|
"""UPDATE guids SET
|
||||||
|
tmdb_id = ?,
|
||||||
|
imdb_id = ?,
|
||||||
|
tvdb_id = ?,
|
||||||
|
anidb_id = ?,
|
||||||
|
mal_id = ?,
|
||||||
|
expiration_date = ?,
|
||||||
|
media_type = ?
|
||||||
|
WHERE plex_guid = ?""", (tmdb_id, imdb_id, tvdb_id, anidb_id, mal_id, expiration_date.strftime("%Y-%m-%d"), media_type, plex_guid))
|
||||||
|
if imdb_id and (tmdb_id or tvdb_id):
|
||||||
|
cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,))
|
||||||
|
cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? WHERE imdb_id = ?", (tmdb_id if media_type == "movie" else tvdb_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id))
|
||||||
|
|
||||||
|
def get_tmdb_from_imdb(self, imdb_id): return self.query_imdb_map("movie", imdb_id)
|
||||||
|
def get_tvdb_from_imdb(self, imdb_id): return self.query_imdb_map("show", imdb_id)
|
||||||
|
def query_imdb_map(self, media_type, imdb_id):
|
||||||
|
id_to_return = None
|
||||||
|
expired = None
|
||||||
|
with sqlite3.connect(self.cache_path) as connection:
|
||||||
|
connection.row_factory = sqlite3.Row
|
||||||
|
with closing(connection.cursor()) as cursor:
|
||||||
|
cursor.execute("SELECT * FROM imdb_map WHERE imdb_id = ? AND media_type = ?", (imdb_id, media_type))
|
||||||
|
row = cursor.fetchone()
|
||||||
|
if row and row["t_id"]:
|
||||||
|
datetime_object = datetime.strptime(row["expiration_date"], "%Y-%m-%d")
|
||||||
|
time_between_insertion = datetime.now() - datetime_object
|
||||||
|
id_to_return = int(row["t_id"])
|
||||||
|
expired = time_between_insertion.days > self.expiration
|
||||||
|
return id_to_return, expired
|
||||||
|
|
||||||
|
def update_imdb(self, media_type, expired, imdb_id, t_id):
|
||||||
|
expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
|
||||||
|
with sqlite3.connect(self.cache_path) as connection:
|
||||||
|
connection.row_factory = sqlite3.Row
|
||||||
|
with closing(connection.cursor()) as cursor:
|
||||||
|
cursor.execute("INSERT OR IGNORE INTO imdb_map(imdb_id) VALUES(?)", (imdb_id,))
|
||||||
|
cursor.execute("UPDATE imdb_map SET t_id = ?, expiration_date = ?, media_type = ? WHERE imdb_id = ?", (t_id, expiration_date.strftime("%Y-%m-%d"), media_type, imdb_id))
|
1185
modules/config.py
Normal file
1185
modules/config.py
Normal file
File diff suppressed because it is too large
Load diff
131
modules/imdb.py
Normal file
131
modules/imdb.py
Normal file
|
@ -0,0 +1,131 @@
|
||||||
|
import logging, math, re, requests, time
|
||||||
|
from lxml import html
|
||||||
|
from modules import util
|
||||||
|
from modules.util import Failed
|
||||||
|
from retrying import retry
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class IMDbAPI:
|
||||||
|
def __init__(self, Cache=None, TMDb=None, Trakt=None, TVDb=None):
|
||||||
|
if TMDb is None and Trakt is None:
|
||||||
|
raise Failed("IMDb Error: IMDb requires either TMDb or Trakt")
|
||||||
|
self.Cache = Cache
|
||||||
|
self.TMDb = TMDb
|
||||||
|
self.Trakt = Trakt
|
||||||
|
self.TVDb = TVDb
|
||||||
|
|
||||||
|
def get_imdb_ids_from_url(self, imdb_url, language, limit):
|
||||||
|
imdb_url = imdb_url.strip()
|
||||||
|
if not imdb_url.startswith("https://www.imdb.com/list/ls") and not imdb_url.startswith("https://www.imdb.com/search/title/?"):
|
||||||
|
raise Failed("IMDb Error: {} must begin with either:\n| https://www.imdb.com/list/ls (For Lists)\n| https://www.imdb.com/search/title/? (For Searches)".format(imdb_url))
|
||||||
|
|
||||||
|
if imdb_url.startswith("https://www.imdb.com/list/ls"):
|
||||||
|
try: list_id = re.search("(\\d+)", str(imdb_url)).group(1)
|
||||||
|
except AttributeError: raise Failed("IMDb Error: Failed to parse List ID from {}".format(imdb_url))
|
||||||
|
current_url = "https://www.imdb.com/search/title/?lists=ls{}".format(list_id)
|
||||||
|
else:
|
||||||
|
current_url = imdb_url
|
||||||
|
header = {"Accept-Language": language}
|
||||||
|
length = 0
|
||||||
|
imdb_ids = []
|
||||||
|
response = self.send_request(current_url, header)
|
||||||
|
try: results = html.fromstring(response).xpath("//div[@class='desc']/span/text()")[0].replace(",", "")
|
||||||
|
except IndexError: raise Failed("IMDb Error: Failed to parse URL: {}".format(imdb_url))
|
||||||
|
try: total = int(re.findall("(\\d+) title", results)[0])
|
||||||
|
except IndexError: raise Failed("IMDb Error: No Results at URL: {}".format(imdb_url))
|
||||||
|
if "&start=" in current_url: current_url = re.sub("&start=\d+", "", current_url)
|
||||||
|
if "&count=" in current_url: current_url = re.sub("&count=\d+", "", current_url)
|
||||||
|
if limit < 1 or total < limit: limit = total
|
||||||
|
remainder = limit % 250
|
||||||
|
if remainder == 0: remainder = 250
|
||||||
|
num_of_pages = math.ceil(int(limit) / 250)
|
||||||
|
for i in range(1, num_of_pages + 1):
|
||||||
|
start_num = (i - 1) * 250 + 1
|
||||||
|
length = util.print_return(length, "Parsing Page {}/{} {}-{}".format(i, num_of_pages, start_num, limit if i == num_of_pages else i * 250))
|
||||||
|
response = self.send_request("{}&count={}&start={}".format(current_url, remainder if i == num_of_pages else 250, start_num), header)
|
||||||
|
imdb_ids.extend(html.fromstring(response).xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst"))
|
||||||
|
util.print_end(length)
|
||||||
|
if imdb_ids: return imdb_ids
|
||||||
|
else: raise Failed("IMDb Error: No Movies Found at {}".format(imdb_url))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def send_request(self, url, header):
|
||||||
|
return requests.get(url, headers=header).content
|
||||||
|
|
||||||
|
def get_items(self, method, data, language, status_message=True):
|
||||||
|
pretty = util.pretty_names[method] if method in util.pretty_names else method
|
||||||
|
if status_message:
|
||||||
|
logger.debug("Data: {}".format(data))
|
||||||
|
show_ids = []
|
||||||
|
movie_ids = []
|
||||||
|
if method == "imdb_id":
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {}".format(pretty, data))
|
||||||
|
tmdb_id, tvdb_id = self.convert_from_imdb(data, language)
|
||||||
|
if tmdb_id: movie_ids.append(tmdb_id)
|
||||||
|
if tvdb_id: show_ids.append(tvdb_id)
|
||||||
|
elif method == "imdb_list":
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {}".format(pretty,"{} Items at {}".format(data["limit"], data["url"]) if data["limit"] > 0 else data["url"]))
|
||||||
|
imdb_ids = self.get_imdb_ids_from_url(data["url"], language, data["limit"])
|
||||||
|
total_ids = len(imdb_ids)
|
||||||
|
length = 0
|
||||||
|
for i, imdb_id in enumerate(imdb_ids, 1):
|
||||||
|
length = util.print_return(length, "Converting IMDb ID {}/{}".format(i, total_ids))
|
||||||
|
try:
|
||||||
|
tmdb_id, tvdb_id = self.convert_from_imdb(imdb_id, language)
|
||||||
|
if tmdb_id: movie_ids.append(tmdb_id)
|
||||||
|
if tvdb_id: show_ids.append(tvdb_id)
|
||||||
|
except Failed as e: logger.warning(e)
|
||||||
|
util.print_end(length, "Processed {} IMDb IDs".format(total_ids))
|
||||||
|
else:
|
||||||
|
raise Failed("IMDb Error: Method {} not supported".format(method))
|
||||||
|
if status_message:
|
||||||
|
logger.debug("TMDb IDs Found: {}".format(movie_ids))
|
||||||
|
logger.debug("TVDb IDs Found: {}".format(show_ids))
|
||||||
|
return movie_ids, show_ids
|
||||||
|
|
||||||
|
def convert_from_imdb(self, imdb_id, language):
|
||||||
|
if self.Cache:
|
||||||
|
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb_id)
|
||||||
|
update_tmdb = False
|
||||||
|
if not tmdb_id:
|
||||||
|
tmdb_id, update_tmdb = self.Cache.get_tmdb_from_imdb(imdb_id)
|
||||||
|
if update_tmdb:
|
||||||
|
tmdb_id = None
|
||||||
|
update_tvdb = False
|
||||||
|
if not tvdb_id:
|
||||||
|
tvdb_id, update_tvdb = self.Cache.get_tvdb_from_imdb(imdb_id)
|
||||||
|
if update_tvdb:
|
||||||
|
tvdb_id = None
|
||||||
|
else:
|
||||||
|
tmdb_id = None
|
||||||
|
tvdb_id = None
|
||||||
|
from_cache = tmdb_id is not None or tvdb_id is not None
|
||||||
|
|
||||||
|
if not tmdb_id and not tvdb_id and self.TMDb:
|
||||||
|
try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
if not tmdb_id and not tvdb_id and self.TMDb:
|
||||||
|
try: tvdb_id = self.TMDb.convert_imdb_to_tvdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
if not tmdb_id and not tvdb_id and self.Trakt:
|
||||||
|
try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
if not tmdb_id and not tvdb_id and self.Trakt:
|
||||||
|
try: tvdb_id = self.Trakt.convert_imdb_to_tvdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
try:
|
||||||
|
if tmdb_id and not from_cache: self.TMDb.get_movie(tmdb_id)
|
||||||
|
except Failed: tmdb_id = None
|
||||||
|
try:
|
||||||
|
if tvdb_id and not from_cache: self.TVDb.get_series(language, tvdb_id=tvdb_id)
|
||||||
|
except Failed: tvdb_id = None
|
||||||
|
if not tmdb_id and not tvdb_id : raise Failed("IMDb Error: No TMDb ID or TVDb ID found for IMDb: {}".format(imdb_id))
|
||||||
|
if self.Cache:
|
||||||
|
if tmdb_id and update_tmdb is not False:
|
||||||
|
self.Cache.update_imdb("movie", update_tmdb, imdb_id, tmdb_id)
|
||||||
|
if tvdb_id and update_tvdb is not False:
|
||||||
|
self.Cache.update_imdb("show", update_tvdb, imdb_id, tvdb_id)
|
||||||
|
return tmdb_id, tvdb_id
|
193
modules/mal.py
Normal file
193
modules/mal.py
Normal file
|
@ -0,0 +1,193 @@
|
||||||
|
import json, logging, re, requests, secrets, webbrowser
|
||||||
|
from modules import util
|
||||||
|
from modules.util import Failed, TimeoutExpired
|
||||||
|
from retrying import retry
|
||||||
|
from ruamel import yaml
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class MyAnimeListIDList:
|
||||||
|
def __init__(self):
|
||||||
|
self.ids = json.loads(requests.get("https://raw.githubusercontent.com/Fribb/anime-lists/master/animeMapping_full.json").content)
|
||||||
|
|
||||||
|
def convert_mal_to_tvdb(self, mal_id): return self.convert_mal(mal_id, "mal_id", "thetvdb_id")
|
||||||
|
def convert_mal_to_tmdb(self, mal_id): return self.convert_mal(mal_id, "mal_id", "themoviedb_id")
|
||||||
|
def convert_tvdb_to_mal(self, tvdb_id): return self.convert_mal(tvdb_id, "thetvdb_id", "mal_id")
|
||||||
|
def convert_tmdb_to_mal(self, tmdb_id): return self.convert_mal(tmdb_id, "themoviedb_id", "mal_id")
|
||||||
|
def convert_mal(self, input_id, from_id, to_id):
|
||||||
|
for attrs in self.ids:
|
||||||
|
if from_id in attrs and int(attrs[from_id]) == int(input_id) and to_id in attrs and int(attrs[to_id]) > 0:
|
||||||
|
return attrs[to_id]
|
||||||
|
raise Failed("MyAnimeList Error: {} ID not found for {}: {}".format(util.pretty_ids[to_id], util.pretty_ids[from_id], input_id))
|
||||||
|
|
||||||
|
def find_mal_ids(self, mal_id):
|
||||||
|
for mal in self.ids:
|
||||||
|
if "mal_id" in mal and int(mal["mal_id"]) == int(mal_id):
|
||||||
|
return mal
|
||||||
|
raise Failed("MyAnimeList Error: MyAnimeList ID: {} not found".format(mal_id))
|
||||||
|
|
||||||
|
class MyAnimeListAPI:
|
||||||
|
def __init__(self, params, MyAnimeListIDList, authorization=None):
|
||||||
|
self.urls = {
|
||||||
|
"oauth_token": "https://myanimelist.net/v1/oauth2/token",
|
||||||
|
"oauth_authorize": "https://myanimelist.net/v1/oauth2/authorize",
|
||||||
|
"ranking": "https://api.myanimelist.net/v2/anime/ranking",
|
||||||
|
"season": "https://api.myanimelist.net/v2/anime/season",
|
||||||
|
"suggestions": "https://api.myanimelist.net/v2/anime/suggestions",
|
||||||
|
"user": "https://api.myanimelist.net/v2/users"
|
||||||
|
}
|
||||||
|
self.client_id = params["client_id"]
|
||||||
|
self.client_secret = params["client_secret"]
|
||||||
|
self.config_path = params["config_path"]
|
||||||
|
self.authorization = authorization
|
||||||
|
self.MyAnimeListIDList = MyAnimeListIDList
|
||||||
|
if not self.save_authorization(self.authorization):
|
||||||
|
if not self.refresh_authorization():
|
||||||
|
self.get_authorization()
|
||||||
|
|
||||||
|
def get_authorization(self):
|
||||||
|
code_verifier = secrets.token_urlsafe(100)[:128]
|
||||||
|
url = "{}?response_type=code&client_id={}&code_challenge={}".format(self.urls["oauth_authorize"], self.client_id, code_verifier)
|
||||||
|
logger.info("")
|
||||||
|
logger.info("Navigate to: {}".format(url))
|
||||||
|
logger.info("")
|
||||||
|
logger.info("Login and click the Allow option. You will then be redirected to a localhost")
|
||||||
|
logger.info("url that most likely won't load, which is fine. Copy the URL and paste it below")
|
||||||
|
webbrowser.open(url, new=2)
|
||||||
|
try: url = util.logger_input("URL").strip()
|
||||||
|
except TimeoutExpired: raise Failed("Input Timeout: URL required.")
|
||||||
|
if not url: raise Failed("MyAnimeList Error: No input MyAnimeList code required.")
|
||||||
|
match = re.search("code=([^&]+)", str(url))
|
||||||
|
if not match:
|
||||||
|
raise Failed("MyAnimeList Error: Invalid URL")
|
||||||
|
code = match.group(1)
|
||||||
|
data = {
|
||||||
|
"client_id": self.client_id,
|
||||||
|
"client_secret": self.client_secret,
|
||||||
|
"code": code,
|
||||||
|
"code_verifier": code_verifier,
|
||||||
|
"grant_type": "authorization_code"
|
||||||
|
}
|
||||||
|
new_authorization = self.oauth_request(data)
|
||||||
|
if "error" in new_authorization:
|
||||||
|
raise Failed("MyAnimeList Error: Invalid code")
|
||||||
|
if not self.save_authorization(new_authorization):
|
||||||
|
raise Failed("MyAnimeList Error: New Authorization Failed")
|
||||||
|
|
||||||
|
def check_authorization(self, authorization):
|
||||||
|
try:
|
||||||
|
self.send_request(self.urls["suggestions"], authorization=authorization)
|
||||||
|
return True
|
||||||
|
except Failed as e:
|
||||||
|
logger.debug(e)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def refresh_authorization(self):
|
||||||
|
if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]:
|
||||||
|
logger.info("Refreshing Access Token...")
|
||||||
|
data = {
|
||||||
|
"client_id": self.client_id,
|
||||||
|
"client_secret": self.client_secret,
|
||||||
|
"refresh_token": self.authorization["refresh_token"],
|
||||||
|
"grant_type": "refresh_token"
|
||||||
|
}
|
||||||
|
refreshed_authorization = self.oauth_request(data)
|
||||||
|
return self.save_authorization(refreshed_authorization)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def save_authorization(self, authorization):
|
||||||
|
if authorization is not None and "access_token" in authorization and authorization["access_token"] and self.check_authorization(authorization):
|
||||||
|
if self.authorization != authorization:
|
||||||
|
yaml.YAML().allow_duplicate_keys = True
|
||||||
|
config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
|
||||||
|
config["mal"]["authorization"] = {
|
||||||
|
"access_token": authorization["access_token"],
|
||||||
|
"token_type": authorization["token_type"],
|
||||||
|
"expires_in": authorization["expires_in"],
|
||||||
|
"refresh_token": authorization["refresh_token"]
|
||||||
|
}
|
||||||
|
logger.info("Saving authorization information to {}".format(self.config_path))
|
||||||
|
yaml.round_trip_dump(config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
|
||||||
|
self.authorization = authorization
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def oauth_request(self, data):
|
||||||
|
return requests.post(self.urls["oauth_token"], data).json()
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def send_request(self, url, authorization=None):
|
||||||
|
new_authorization = authorization if authorization else self.authorization
|
||||||
|
response = requests.get(url, headers={"Authorization": "Bearer {}".format(new_authorization["access_token"])}).json()
|
||||||
|
if "error" in response: raise Failed("MyAnimeList Error: {}".format(response["error"]))
|
||||||
|
else: return response
|
||||||
|
|
||||||
|
def parse_mal_ids(self, data):
|
||||||
|
mal_ids = []
|
||||||
|
if "data" in data:
|
||||||
|
for d in data["data"]:
|
||||||
|
mal_ids.append(d["node"]["id"])
|
||||||
|
return mal_ids
|
||||||
|
|
||||||
|
def get_username(self):
|
||||||
|
return self.send_request("{}/@me".format(self.urls["user"]))["name"]
|
||||||
|
|
||||||
|
def get_ranked(self, ranking_type, limit):
|
||||||
|
url = "{}?ranking_type={}&limit={}".format(self.urls["ranking"], ranking_type, limit)
|
||||||
|
return self.parse_mal_ids(self.send_request(url))
|
||||||
|
|
||||||
|
def get_season(self, season, year, sort_by, limit):
|
||||||
|
url = "{}/{}/{}?sort={}&limit={}".format(self.urls["season"], year, season, sort_by, limit)
|
||||||
|
return self.parse_mal_ids(self.send_request(url))
|
||||||
|
|
||||||
|
def get_suggestions(self, limit):
|
||||||
|
url = "{}?limit={}".format(self.urls["suggestions"], limit)
|
||||||
|
return self.parse_mal_ids(self.send_request(url))
|
||||||
|
|
||||||
|
def get_userlist(self, username, status, sort_by, limit):
|
||||||
|
url = "{}/{}/animelist?{}sort={}&limit={}".format(self.urls["user"], username, "" if status == "all" else "status={}&".format(status), sort_by, limit)
|
||||||
|
return self.parse_mal_ids(self.send_request(url))
|
||||||
|
|
||||||
|
def get_items(self, method, data, status_message=True):
|
||||||
|
if status_message:
|
||||||
|
logger.debug("Data: {}".format(data))
|
||||||
|
pretty = util.pretty_names[method] if method in util.pretty_names else method
|
||||||
|
if method == "mal_id":
|
||||||
|
mal_ids = [data]
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {}".format(pretty, data))
|
||||||
|
elif method in util.mal_ranked_name:
|
||||||
|
mal_ids = self.get_ranked(util.mal_ranked_name[method], data)
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} Anime".format(pretty, data))
|
||||||
|
elif method == "mal_season":
|
||||||
|
mal_ids = self.get_season(data["season"], data["year"], data["sort_by"], data["limit"])
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} Anime from {} {} sorted by {}".format(pretty, data["limit"], util.pretty_seasons[data["season"]], data["year"], util.mal_pretty[data["sort_by"]]))
|
||||||
|
elif method == "mal_suggested":
|
||||||
|
mal_ids = self.get_suggestions(data)
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} Anime".format(pretty, data))
|
||||||
|
elif method == "mal_userlist":
|
||||||
|
mal_ids = self.get_userlist(data["username"], data["status"], data["sort_by"], data["limit"])
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} Anime from {}'s {} list sorted by {}".format(pretty, data["limit"], self.get_username() if data["username"] == "@me" else data["username"], util.mal_pretty[data["status"]], util.mal_pretty[data["sort_by"]]))
|
||||||
|
else:
|
||||||
|
raise Failed("MyAnimeList Error: Method {} not supported".format(method))
|
||||||
|
show_ids = []
|
||||||
|
movie_ids = []
|
||||||
|
for mal_id in mal_ids:
|
||||||
|
try:
|
||||||
|
ids = self.MyAnimeListIDList.find_mal_ids(mal_id)
|
||||||
|
if "thetvdb_id" in ids and int(ids["thetvdb_id"]) > 0: show_ids.append(int(ids["thetvdb_id"]))
|
||||||
|
elif "themoviedb_id" in ids and int(ids["themoviedb_id"]) > 0: movie_ids.append(int(ids["themoviedb_id"]))
|
||||||
|
else: raise Failed("MyAnimeList Error: MyAnimeList ID: {} has no other IDs associated with it".format(mal_id))
|
||||||
|
except Failed as e:
|
||||||
|
if status_message:
|
||||||
|
logger.error(e)
|
||||||
|
if status_message:
|
||||||
|
logger.debug("MyAnimeList IDs Found: {}".format(mal_ids))
|
||||||
|
logger.debug("Shows Found: {}".format(show_ids))
|
||||||
|
logger.debug("Movies Found: {}".format(movie_ids))
|
||||||
|
return movie_ids, show_ids
|
430
modules/plex.py
Normal file
430
modules/plex.py
Normal file
|
@ -0,0 +1,430 @@
|
||||||
|
import logging, os, requests
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from modules import util
|
||||||
|
from modules.radarr import RadarrAPI
|
||||||
|
from modules.sonarr import SonarrAPI
|
||||||
|
from modules.tautulli import TautulliAPI
|
||||||
|
from modules.util import Failed
|
||||||
|
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
|
||||||
|
from plexapi.library import Collections, MovieSection, ShowSection
|
||||||
|
from plexapi.server import PlexServer
|
||||||
|
from plexapi.video import Movie, Show
|
||||||
|
from retrying import retry
|
||||||
|
from ruamel import yaml
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
from urllib.request import Request
|
||||||
|
from urllib.request import urlopen
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class PlexAPI:
|
||||||
|
def __init__(self, params):
|
||||||
|
try: self.PlexServer = PlexServer(params["plex"]["url"], params["plex"]["token"], timeout=60)
|
||||||
|
except Unauthorized: raise Failed("Plex Error: Plex token is invalid")
|
||||||
|
except ValueError as e: raise Failed("Plex Error: {}".format(e))
|
||||||
|
except requests.exceptions.ConnectionError as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
raise Failed("Plex Error: Plex url is invalid")
|
||||||
|
self.is_movie = params["library_type"] == "movie"
|
||||||
|
self.is_show = params["library_type"] == "show"
|
||||||
|
self.Plex = next((s for s in self.PlexServer.library.sections() if s.title == params["name"] and ((self.is_movie and isinstance(s, MovieSection)) or (self.is_show and isinstance(s, ShowSection)))), None)
|
||||||
|
if not self.Plex: raise Failed("Plex Error: Plex Library {} not found".format(params["name"]))
|
||||||
|
try: self.data, ind, bsi = yaml.util.load_yaml_guess_indent(open(params["metadata_path"], encoding="utf-8"))
|
||||||
|
except yaml.scanner.ScannerError as e: raise Failed("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
|
||||||
|
|
||||||
|
self.metadata = None
|
||||||
|
if "metadata" in self.data:
|
||||||
|
if self.data["metadata"]: self.metadata = self.data["metadata"]
|
||||||
|
else: logger.warning("Config Warning: metadata attribute is blank")
|
||||||
|
else: logger.warning("Config Warning: metadata attribute not found")
|
||||||
|
|
||||||
|
self.collections = None
|
||||||
|
if "collections" in self.data:
|
||||||
|
if self.data["collections"]: self.collections = self.data["collections"]
|
||||||
|
else: logger.warning("Config Warning: collections attribute is blank")
|
||||||
|
else: logger.warning("Config Warning: collections attribute not found")
|
||||||
|
|
||||||
|
if self.metadata is None and self.collections is None:
|
||||||
|
raise Failed("YAML Error: metadata attributes or collections attribute required")
|
||||||
|
|
||||||
|
if params["asset_directory"]:
|
||||||
|
logger.info("Using Asset Directory: {}".format(params["asset_directory"]))
|
||||||
|
|
||||||
|
self.Radarr = None
|
||||||
|
if params["tmdb"] and params["radarr"]:
|
||||||
|
logger.info("Connecting to {} library's Radarr...".format(params["name"]))
|
||||||
|
try: self.Radarr = RadarrAPI(params["tmdb"], params["radarr"])
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
logger.info("{} library's Radarr Connection {}".format(params["name"], "Failed" if self.Radarr is None else "Successful"))
|
||||||
|
|
||||||
|
self.Sonarr = None
|
||||||
|
if params["tvdb"] and params["sonarr"]:
|
||||||
|
logger.info("Connecting to {} library's Sonarr...".format(params["name"]))
|
||||||
|
try: self.Sonarr = SonarrAPI(params["tvdb"], params["sonarr"], self.Plex.language)
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
logger.info("{} library's Sonarr Connection {}".format(params["name"], "Failed" if self.Sonarr is None else "Successful"))
|
||||||
|
|
||||||
|
self.Tautulli = None
|
||||||
|
if params["tautulli"]:
|
||||||
|
logger.info("Connecting to {} library's Tautulli...".format(params["name"]))
|
||||||
|
try: self.Tautulli = TautulliAPI(params["tautulli"])
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
logger.info("{} library's Tautulli Connection {}".format(params["name"], "Failed" if self.Tautulli is None else "Successful"))
|
||||||
|
|
||||||
|
self.name = params["name"]
|
||||||
|
|
||||||
|
self.missing_path = os.path.join(os.path.dirname(os.path.abspath(params["metadata_path"])), "{}_missing.yml".format(os.path.splitext(os.path.basename(params["metadata_path"]))[0]))
|
||||||
|
self.metadata_path = params["metadata_path"]
|
||||||
|
self.asset_directory = params["asset_directory"]
|
||||||
|
self.sync_mode = params["sync_mode"]
|
||||||
|
self.plex = params["plex"]
|
||||||
|
self.radarr = params["radarr"]
|
||||||
|
self.sonarr = params["sonarr"]
|
||||||
|
self.tautulli = params["tautulli"]
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def search(self, title, libtype=None, year=None):
|
||||||
|
if libtype is not None and year is not None: return self.Plex.search(title=title, year=year, libtype=libtype)
|
||||||
|
elif libtype is not None: return self.Plex.search(title=title, libtype=libtype)
|
||||||
|
elif year is not None: return self.Plex.search(title=title, year=year)
|
||||||
|
else: return self.Plex.search(title=title)
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def fetchItem(self, data):
|
||||||
|
return self.PlexServer.fetchItem(data)
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def server_search(self, data):
|
||||||
|
return self.PlexServer.search(data)
|
||||||
|
|
||||||
|
def get_all_collections(self):
|
||||||
|
return self.Plex.search(libtype="collection")
|
||||||
|
|
||||||
|
def get_collection(self, data):
|
||||||
|
collection = util.choose_from_list(self.search(str(data), libtype="collection"), "collection", str(data), exact=True)
|
||||||
|
if collection: return collection
|
||||||
|
else: raise Failed("Plex Error: Collection {} not found".format(data))
|
||||||
|
|
||||||
|
def get_item(self, data, year=None):
|
||||||
|
if isinstance(data, (int, Movie, Show)):
|
||||||
|
try: return self.fetchItem(data.ratingKey if isinstance(data, (Movie, Show)) else data)
|
||||||
|
except BadRequest: raise Failed("Plex Error: Item {} not found".format(data))
|
||||||
|
else:
|
||||||
|
item_list = self.search(title=data) if year is None else self.search(data, year=year)
|
||||||
|
item = util.choose_from_list(item_list, "movie" if self.is_movie else "show", data)
|
||||||
|
if item: return item
|
||||||
|
else: raise Failed("Plex Error: Item {} not found".format(data))
|
||||||
|
|
||||||
|
def validate_collections(self, collections):
|
||||||
|
valid_collections = []
|
||||||
|
for collection in collections:
|
||||||
|
try: valid_collections.append(self.get_collection(collection))
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
if len(valid_collections) == 0:
|
||||||
|
raise Failed("Collection Error: No valid Plex Collections in {}".format(collections[c][m]))
|
||||||
|
return valid_collections
|
||||||
|
|
||||||
|
def get_actor_rating_key(self, data):
|
||||||
|
movie_rating_key = None
|
||||||
|
for result in self.server_search(data):
|
||||||
|
entry = str(result).split(":")
|
||||||
|
entry[0] = entry[0][1:]
|
||||||
|
if entry[0] == "Movie":
|
||||||
|
movie_rating_key = int(entry[1])
|
||||||
|
break
|
||||||
|
if movie_rating_key:
|
||||||
|
for role in self.fetchItem(movie_rating_key).roles:
|
||||||
|
role = str(role).split(":")
|
||||||
|
if data.upper().replace(" ", "-") == role[2][:-1].upper():
|
||||||
|
return int(role[1])
|
||||||
|
raise Failed("Plex Error: Actor: {} not found".format(data))
|
||||||
|
|
||||||
|
def get_ids(self, movie):
|
||||||
|
req = Request("{}{}".format(self.url, movie.key))
|
||||||
|
req.add_header("X-Plex-Token", self.token)
|
||||||
|
req.add_header("User-Agent", "Mozilla/5.0")
|
||||||
|
with urlopen(req) as response:
|
||||||
|
contents = response.read()
|
||||||
|
tmdb_id = None
|
||||||
|
imdb_id = None
|
||||||
|
for guid_tag in BeautifulSoup(contents, "lxml").find_all("guid"):
|
||||||
|
agent = urlparse(guid_tag["id"]).scheme
|
||||||
|
guid = urlparse(guid_tag["id"]).netloc
|
||||||
|
if agent == "tmdb": tmdb_id = guid
|
||||||
|
elif agent == "imdb": imdb_id = guid
|
||||||
|
return tmdb_id, imdb_id
|
||||||
|
|
||||||
|
def del_collection_if_empty(self, collection):
|
||||||
|
missing_data = {}
|
||||||
|
if not os.path.exists(self.missing_path):
|
||||||
|
with open(self.missing_path, "w"): pass
|
||||||
|
try:
|
||||||
|
missing_data, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.missing_path))
|
||||||
|
if not missing_data:
|
||||||
|
missing_data = {}
|
||||||
|
if collection in missing_data and len(missing_data[collection]) == 0:
|
||||||
|
del missing_data[collection]
|
||||||
|
yaml.round_trip_dump(missing_data, open(self.missing_path, "w"), indent=ind, block_seq_indent=bsi)
|
||||||
|
except yaml.scanner.ScannerError as e:
|
||||||
|
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
|
||||||
|
|
||||||
|
def clear_collection_missing(self, collection):
|
||||||
|
missing_data = {}
|
||||||
|
if not os.path.exists(self.missing_path):
|
||||||
|
with open(self.missing_path, "w"): pass
|
||||||
|
try:
|
||||||
|
missing_data, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.missing_path))
|
||||||
|
if not missing_data:
|
||||||
|
missing_data = {}
|
||||||
|
if collection in missing_data:
|
||||||
|
missing_data[collection.encode("ascii", "replace").decode()] = {}
|
||||||
|
yaml.round_trip_dump(missing_data, open(self.missing_path, "w"), indent=ind, block_seq_indent=bsi)
|
||||||
|
except yaml.scanner.ScannerError as e:
|
||||||
|
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
|
||||||
|
|
||||||
|
def save_missing(self, collection, items, is_movie):
|
||||||
|
missing_data = {}
|
||||||
|
if not os.path.exists(self.missing_path):
|
||||||
|
with open(self.missing_path, "w"): pass
|
||||||
|
try:
|
||||||
|
missing_data, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.missing_path))
|
||||||
|
if not missing_data:
|
||||||
|
missing_data = {}
|
||||||
|
col_name = collection.encode("ascii", "replace").decode()
|
||||||
|
if col_name not in missing_data:
|
||||||
|
missing_data[col_name] = {}
|
||||||
|
section = "Movies Missing (TMDb IDs)" if is_movie else "Shows Missing (TVDb IDs)"
|
||||||
|
if section not in missing_data[col_name]:
|
||||||
|
missing_data[col_name][section] = {}
|
||||||
|
for title, item_id in items:
|
||||||
|
missing_data[col_name][section][int(item_id)] = str(title).encode("ascii", "replace").decode()
|
||||||
|
yaml.round_trip_dump(missing_data, open(self.missing_path, "w"), indent=ind, block_seq_indent=bsi)
|
||||||
|
except yaml.scanner.ScannerError as e:
|
||||||
|
logger.error("YAML Error: {}".format(str(e).replace("\n", "\n|\t ")))
|
||||||
|
|
||||||
|
def add_to_collection(self, collection, items, filters, map={}):
|
||||||
|
name = collection.title if isinstance(collection, Collections) else collection
|
||||||
|
collection_items = collection.children if isinstance(collection, Collections) else []
|
||||||
|
|
||||||
|
total = len(items)
|
||||||
|
max_length = len(str(total))
|
||||||
|
length = 0
|
||||||
|
for i, item in enumerate(items, 1):
|
||||||
|
current = self.get_item(item)
|
||||||
|
match = True
|
||||||
|
if filters:
|
||||||
|
length = util.print_return(length, "Filtering {}/{} {}".format((" " * (max_length - len(str(i)))) + str(i), total, current.title))
|
||||||
|
for f in filters:
|
||||||
|
modifier = f[0][-4:]
|
||||||
|
method = util.filter_alias[f[0][:-4]] if modifier in [".not", ".lte", ".gte"] else util.filter_alias[f[0]]
|
||||||
|
if method == "max_age":
|
||||||
|
threshold_date = datetime.now() - timedelta(days=f[1])
|
||||||
|
attr = getattr(current, "originallyAvailableAt")
|
||||||
|
if attr is None or attr < threshold_date:
|
||||||
|
match = False
|
||||||
|
break
|
||||||
|
elif modifier in [".gte", ".lte"]:
|
||||||
|
if method == "originallyAvailableAt":
|
||||||
|
threshold_date = datetime.strptime(f[1], "%m/%d/%y")
|
||||||
|
attr = getattr(current, "originallyAvailableAt")
|
||||||
|
if (modifier == ".lte" and attr > threshold_date) or (modifier == ".gte" and attr < threshold_date):
|
||||||
|
match = False
|
||||||
|
break
|
||||||
|
elif method in ["year", "rating"]:
|
||||||
|
attr = getattr(current, method)
|
||||||
|
if (modifier == ".lte" and attr > f[1]) or (modifier == ".gte" and attr < f[1]):
|
||||||
|
match = False
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
terms = f[1] if isinstance(f[1], list) else str(f[1]).split(", ")
|
||||||
|
if method in ["video_resolution", "audio_language", "subtitle_language"]:
|
||||||
|
for media in current.media:
|
||||||
|
if method == "video_resolution": attrs = [media.videoResolution]
|
||||||
|
for part in media.parts:
|
||||||
|
if method == "audio_language": attrs = ([a.language for a in part.audioStreams()])
|
||||||
|
if method == "subtitle_language": attrs = ([s.language for s in part.subtitleStreams()])
|
||||||
|
elif method in ["contentRating", "studio", "year", "rating", "originallyAvailableAt"]: attrs = [str(getattr(current, method))]
|
||||||
|
elif method in ["actors", "countries", "directors", "genres", "writers", "collections"]: attrs = [getattr(x, "tag") for x in getattr(current, method)]
|
||||||
|
|
||||||
|
if (not list(set(terms) & set(attrs)) and modifier != ".not") or (list(set(terms) & set(attrs)) and modifier == ".not"):
|
||||||
|
match = False
|
||||||
|
break
|
||||||
|
length = util.print_return(length, "Filtering {}/{} {}".format((" " * (max_length - len(str(i)))) + str(i), total, current.title))
|
||||||
|
if match:
|
||||||
|
util.print_end(length, "{} Collection | {} | {}".format(name, "=" if current in collection_items else "+", current.title))
|
||||||
|
if current in collection_items: map[current.ratingKey] = None
|
||||||
|
else: current.addCollection(name)
|
||||||
|
media_type = "{}{}".format("Movie" if self.is_movie else "Show", "s" if total > 1 else "")
|
||||||
|
util.print_end(length, "{} {} Processed".format(total, media_type))
|
||||||
|
return map
|
||||||
|
|
||||||
|
def update_metadata(self):
|
||||||
|
logger.info("")
|
||||||
|
util.seperator("{} Library Metadata".format(self.name))
|
||||||
|
logger.info("")
|
||||||
|
if not self.metadata:
|
||||||
|
raise Failed("No metadata to edit")
|
||||||
|
for m in self.metadata:
|
||||||
|
logger.info("")
|
||||||
|
util.seperator()
|
||||||
|
logger.info("")
|
||||||
|
year = None
|
||||||
|
if "year" in self.metadata[m]:
|
||||||
|
now = datetime.datetime.now()
|
||||||
|
if self.metadata[m]["year"] is None: logger.error("Metadata Error: year attribute is blank")
|
||||||
|
elif not isinstance(self.metadata[m]["year"], int): logger.error("Metadata Error: year attribute must be an integer")
|
||||||
|
    elif self.metadata[m]["year"] not in range(1800, now.year + 2): logger.error("Metadata Error: year attribute must be between 1800-{}".format(now.year + 1))
    else: year = self.metadata[m]["year"]

alt_title = None
used_alt = False
if "alt_title" in self.metadata[m]:
    if self.metadata[m]["alt_title"] is None: logger.error("Metadata Error: alt_title attribute is blank")
    else: alt_title = self.metadata[m]["alt_title"]

try:
    item = self.get_item(m, year=year)
except Failed as e:
    if alt_title:
        try:
            item = self.get_item(alt_title, year=year)
            used_alt = True
        except Failed as alt_e:
            logger.error(alt_e)
            logger.error("Skipping {}".format(m))
            continue
    else:
        logger.error(e)
        logger.error("Skipping {}".format(m))
        continue

logger.info("Updating {}: {}...".format("Movie" if self.is_movie else "Show", alt_title if used_alt else m))
edits = {}
def add_edit(name, group, key=None, value=None, sub=None):
    if value or name in group:
        if value or group[name]:
            if key is None: key = name
            if value is None: value = group[name]
            if sub and "sub" in group:
                if group["sub"]:
                    if group["sub"] is True and "(SUB)" not in value: value = "{} (SUB)".format(value)
                    elif group["sub"] is False and " (SUB)" in value: value = value[:-6]
                else:
                    logger.error("Metadata Error: sub attribute is blank")
            edits["{}.value".format(key)] = value
            edits["{}.locked".format(key)] = 1
        else:
            logger.error("Metadata Error: {} attribute is blank".format(name))
if used_alt or "sub" in self.metadata[m]:
    add_edit("title", self.metadata[m], value=m, sub=True)
add_edit("sort_title", self.metadata[m], key="titleSort")
add_edit("originally_available", self.metadata[m], key="originallyAvailableAt")
add_edit("rating", self.metadata[m])
add_edit("content_rating", self.metadata[m], key="contentRating")
add_edit("original_title", self.metadata[m], key="originalTitle")
add_edit("studio", self.metadata[m])
add_edit("tagline", self.metadata[m])
add_edit("summary", self.metadata[m])
try:
    item.edit(**edits)
    item.reload()
    logger.info("{}: {} Details Update Successful".format("Movie" if self.is_movie else "Show", m))
except BadRequest:
    logger.error("{}: {} Details Update Failed".format("Movie" if self.is_movie else "Show", m))
    logger.debug("Details Update: {}".format(edits))
    util.print_stacktrace()

if "genre" in self.metadata[m]:
    if self.metadata[m]["genre"]:
        genre_sync = False
        if "genre_sync_mode" in self.metadata[m]:
            if self.metadata[m]["genre_sync_mode"] is None: logger.error("Metadata Error: genre_sync_mode attribute is blank defaulting to append")
            elif self.metadata[m]["genre_sync_mode"] not in ["append", "sync"]: logger.error("Metadata Error: genre_sync_mode attribute must be either 'append' or 'sync' defaulting to append")
            elif self.metadata[m]["genre_sync_mode"] == "sync": genre_sync = True
        genres = [genre.tag for genre in item.genres]
        values = util.get_list(self.metadata[m]["genre"])
        if genre_sync:
            for genre in (g for g in genres if g not in values):
                item.removeGenre(genre)
                logger.info("Detail: Genre {} removed".format(genre))
        for value in (v for v in values if v not in genres):
            item.addGenre(value)
            logger.info("Detail: Genre {} added".format(value))
    else:
        logger.error("Metadata Error: genre attribute is blank")

if "label" in self.metadata[m]:
    if self.metadata[m]["label"]:
        label_sync = False
        if "label_sync_mode" in self.metadata[m]:
            if self.metadata[m]["label_sync_mode"] is None: logger.error("Metadata Error: label_sync_mode attribute is blank defaulting to append")
            elif self.metadata[m]["label_sync_mode"] not in ["append", "sync"]: logger.error("Metadata Error: label_sync_mode attribute must be either 'append' or 'sync' defaulting to append")
            elif self.metadata[m]["label_sync_mode"] == "sync": label_sync = True
        labels = [label.tag for label in item.labels]
        values = util.get_list(self.metadata[m]["label"])
        if label_sync:
            for label in (l for l in labels if l not in values):
                item.removeLabel(label)
                logger.info("Detail: Label {} removed".format(label))
        for value in (v for v in values if v not in labels):
            item.addLabel(value)
            logger.info("Detail: Label {} added".format(value))
    else:
        logger.error("Metadata Error: label attribute is blank")

if "seasons" in self.metadata[m] and self.is_show:
    if self.metadata[m]["seasons"]:
        for season_id in self.metadata[m]["seasons"]:
            logger.info("")
            logger.info("Updating season {} of {}...".format(season_id, alt_title if used_alt else m))
            if isinstance(season_id, int):
                try: season = item.season(season_id)
                except NotFound: logger.error("Metadata Error: Season: {} not found".format(season_id))
                else:
                    edits = {}
                    add_edit("title", self.metadata[m]["seasons"][season_id], sub=True)
                    add_edit("summary", self.metadata[m]["seasons"][season_id])
                    try:
                        season.edit(**edits)
                        season.reload()
                        logger.info("Season: {} Details Update Successful".format(season_id))
                    except BadRequest:
                        logger.debug("Season: {} Details Update: {}".format(season_id, edits))
                        logger.error("Season: {} Details Update Failed".format(season_id))
                        util.print_stacktrace()
            else:
                logger.error("Metadata Error: Season: {} invalid, it must be an integer".format(season_id))
    else:
        logger.error("Metadata Error: seasons attribute is blank")

if "episodes" in self.metadata[m] and self.is_show:
    if self.metadata[m]["episodes"]:
        for episode_str in self.metadata[m]["episodes"]:
            logger.info("")
            match = re.search(r"[Ss]{1}\d+[Ee]{1}\d+", episode_str)
            if match:
                output = match.group(0)[1:].split("E" if "E" in match.group(0) else "e")
                season_id = int(output[0])
                episode_id = int(output[1])
                logger.info("Updating episode S{}E{} of {}...".format(season_id, episode_id, alt_title if used_alt else m))
                try: episode = item.episode(season=season_id, episode=episode_id)
                except NotFound: logger.error("Metadata Error: episode {} of season {} not found".format(episode_id, season_id))
                else:
                    edits = {}
                    add_edit("title", self.metadata[m]["episodes"][episode_str], sub=True)
                    add_edit("sort_title", self.metadata[m]["episodes"][episode_str], key="titleSort")
                    add_edit("rating", self.metadata[m]["episodes"][episode_str])
                    add_edit("originally_available", self.metadata[m]["episodes"][episode_str], key="originallyAvailableAt")
                    add_edit("summary", self.metadata[m]["episodes"][episode_str])
                    try:
                        episode.edit(**edits)
                        episode.reload()
                        logger.info("Season: {} Episode: {} Details Update Successful".format(season_id, episode_id))
                    except BadRequest:
                        logger.debug("Season: {} Episode: {} Details Update: {}".format(season_id, episode_id, edits))
                        logger.error("Season: {} Episode: {} Details Update Failed".format(season_id, episode_id))
                        util.print_stacktrace()
            else:
                logger.error("Metadata Error: episode {} invalid, it must have the S##E## format".format(episode_str))
    else:
        logger.error("Metadata Error: episodes attribute is blank")
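As a rough illustration of the metadata files this block consumes, here is a minimal sketch that parses a hand-written entry covering the attributes handled above (year, alt_title, sub, genre, seasons, episodes). The title, values, and file contents are hypothetical; only the attribute names come from the code.

from ruamel import yaml  # already a project dependency, used for config handling

# Hypothetical metadata entry exercising the attributes parsed above.
example = """
Cowboy Bebop:
  alt_title: "カウボーイビバップ"
  year: 1998
  sub: false
  genre: Anime, Space Western
  genre_sync_mode: append
  seasons:
    1:
      title: Session Collection
  episodes:
    S01E01:
      title: Asteroid Blues
      rating: 9.0
"""
metadata = yaml.YAML(typ="safe").load(example)
for title, attrs in metadata.items():
    print(title, sorted(attrs))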
87 modules/radarr.py Normal file
@@ -0,0 +1,87 @@
import logging, re, requests
from modules import util
from modules.util import Failed
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

class RadarrAPI:
    def __init__(self, tmdb, params):
        self.url_params = {"apikey": "{}".format(params["token"])}
        self.base_url = "{}/api{}".format(params["url"], "/v3/" if params["version"] == "v3" else "/")
        try:
            response = requests.get("{}system/status".format(self.base_url), params=self.url_params)
            result = response.json()
        except Exception as e:
            util.print_stacktrace()
            raise Failed("Radarr Error: Could not connect to Radarr at {}".format(params["url"]))
        if "error" in result and result["error"] == "Unauthorized":
            raise Failed("Radarr Error: Invalid API Key")
        if "version" not in result:
            raise Failed("Radarr Error: Unexpected Response Check URL")
        response = requests.get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile"), params=self.url_params)
        self.quality_profile_id = None
        profiles = ""
        for profile in response.json():
            if len(profiles) > 0:
                profiles += ", "
            profiles += profile["name"]
            if profile["name"] == params["quality_profile"]:
                self.quality_profile_id = profile["id"]
        if not self.quality_profile_id:
            raise Failed("Radarr Error: quality_profile: {} does not exist in radarr. Profiles available: {}".format(params["quality_profile"], profiles))
        self.tmdb = tmdb
        self.url = params["url"]
        self.version = params["version"]
        self.token = params["token"]
        self.root_folder_path = params["root_folder_path"]
        self.add = params["add"]
        self.search = params["search"]

    def add_tmdb(self, tmdb_ids):
        logger.info("")
        logger.debug("TMDb IDs: {}".format(tmdb_ids))
        add_count = 0
        for tmdb_id in tmdb_ids:
            try:
                movie = self.tmdb.get_movie(tmdb_id)
            except Failed as e:
                logger.error(e)
                continue

            try:
                year = movie.release_date.split("-")[0]
            except AttributeError:
                logger.error("TMDB Error: No year for ({}) {}".format(tmdb_id, movie.title))
                continue

            if year.isdigit() is False:
                logger.error("TMDB Error: No release date yet for ({}) {}".format(tmdb_id, movie.title))
                continue

            poster = "https://image.tmdb.org/t/p/original{}".format(movie.poster_path)

            titleslug = re.sub(r"([^\s\w]|_)+", "", "{} {}".format(movie.title, year)).replace(" ", "-").lower()

            url_json = {
                "title": movie.title,
                "{}".format("qualityProfileId" if self.version == "v3" else "profileId"): self.quality_profile_id,
                "year": int(year),
                "tmdbid": str(tmdb_id),
                "titleslug": titleslug,
                "monitored": True,
                "rootFolderPath": self.root_folder_path,
                "images": [{"covertype": "poster", "url": poster}],
                "addOptions": {"searchForMovie": self.search}
            }
            response = self.send_post("{}movie".format(self.base_url), url_json)
            if response.status_code < 400:
                logger.info("Added to Radarr | {:<6} | {}".format(tmdb_id, movie.title))
                add_count += 1
            else:
                logger.error("Radarr Error: ({}) {}: ({}) {}".format(tmdb_id, movie.title, response.status_code, response.json()[0]["errorMessage"]))
        logger.info("{} Movie{} added to Radarr".format(add_count, "s" if add_count > 1 else ""))

    @retry(stop_max_attempt_number=6, wait_fixed=10000)
    def send_post(self, url, url_json):
        return requests.post(url, json=url_json, params=self.url_params)
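A quick worked example of the title-slug normalisation add_tmdb performs before posting to Radarr; the helper name and the movie title below are only sample values for illustration.

import re

def title_slug(title, year):
    # Same normalisation as in RadarrAPI.add_tmdb: strip punctuation and underscores,
    # join the title and year, hyphenate the spaces, and lowercase.
    return re.sub(r"([^\s\w]|_)+", "", "{} {}".format(title, year)).replace(" ", "-").lower()

print(title_slug("Star Wars: Episode IV - A New Hope", 1977))
# prints: star-wars-episode-iv--a-new-hope-1977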
78 modules/sonarr.py Normal file
@@ -0,0 +1,78 @@
import logging, re, requests
from modules import util
from modules.util import Failed
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

class SonarrAPI:
    def __init__(self, tvdb, params, language):
        self.url_params = {"apikey": "{}".format(params["token"])}
        self.base_url = "{}/api{}".format(params["url"], "/v3/" if params["version"] == "v3" else "/")
        try:
            response = requests.get("{}system/status".format(self.base_url), params=self.url_params)
            result = response.json()
        except Exception as e:
            util.print_stacktrace()
            raise Failed("Sonarr Error: Could not connect to Sonarr at {}".format(params["url"]))
        if "error" in result and result["error"] == "Unauthorized":
            raise Failed("Sonarr Error: Invalid API Key")
        if "version" not in result:
            raise Failed("Sonarr Error: Unexpected Response Check URL")
        response = requests.get("{}{}".format(self.base_url, "qualityProfile" if params["version"] == "v3" else "profile"), params=self.url_params)
        self.quality_profile_id = None
        profiles = ""
        for profile in response.json():
            if len(profiles) > 0:
                profiles += ", "
            profiles += profile["name"]
            if profile["name"] == params["quality_profile"]:
                self.quality_profile_id = profile["id"]
        if not self.quality_profile_id:
            raise Failed("Sonarr Error: quality_profile: {} does not exist in sonarr. Profiles available: {}".format(params["quality_profile"], profiles))
        self.tvdb = tvdb
        self.language = language
        self.url = params["url"]
        self.version = params["version"]
        self.token = params["token"]
        self.root_folder_path = params["root_folder_path"]
        self.add = params["add"]
        self.search = params["search"]

    def add_tvdb(self, tvdb_ids):
        logger.info("")
        logger.debug("TVDb IDs: {}".format(tvdb_ids))
        add_count = 0
        for tvdb_id in tvdb_ids:
            try:
                show = self.tvdb.get_series(self.language, tvdb_id=tvdb_id)
            except Failed as e:
                logger.error(e)
                continue

            titleslug = re.sub(r"([^\s\w]|_)+", "", show.title).replace(" ", "-").lower()

            url_json = {
                "title": show.title,
                "{}".format("qualityProfileId" if self.version == "v3" else "profileId"): self.quality_profile_id,
                "languageProfileId": 1,
                "tvdbId": int(tvdb_id),
                "titleslug": titleslug,
                "language": self.language,
                "monitored": True,
                "rootFolderPath": self.root_folder_path,
                "seasons": [],
                "images": [{"covertype": "poster", "url": show.poster_path}],
                "addOptions": {"searchForMissingEpisodes": self.search}
            }
            response = self.send_post("{}series".format(self.base_url), url_json)
            if response.status_code < 400:
                logger.info("Added to Sonarr | {:<6} | {}".format(tvdb_id, show.title))
                add_count += 1
            else:
                logger.error("Sonarr Error: ({}) {}: ({}) {}".format(tvdb_id, show.title, response.status_code, response.json()[0]["errorMessage"]))
        logger.info("{} Show{} added to Sonarr".format(add_count, "s" if add_count > 1 else ""))

    @retry(stop_max_attempt_number=6, wait_fixed=10000)
    def send_post(self, url, url_json):
        return requests.post(url, json=url_json, params=self.url_params)
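Radarr and Sonarr build their endpoints the same way, so a small standalone sketch of the version switch may help; the hostnames and ports below are placeholders.

def arr_endpoints(url, version):
    # Mirrors the constructors above: v3 uses /api/v3/ and the qualityProfile endpoint,
    # while older versions use /api/ and the profile endpoint.
    base = "{}/api{}".format(url, "/v3/" if version == "v3" else "/")
    return base + "system/status", base + ("qualityProfile" if version == "v3" else "profile")

print(arr_endpoints("http://localhost:8989", "v3"))
print(arr_endpoints("http://localhost:7878", "v2"))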
62 modules/tautulli.py Normal file
@@ -0,0 +1,62 @@
import logging, requests
from modules import util
from modules.util import Failed
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

class TautulliAPI:
    def __init__(self, params):
        try:
            response = requests.get("{}/api/v2?apikey={}&cmd=get_library_names".format(params["url"], params["apikey"])).json()
        except Exception as e:
            util.print_stacktrace()
            raise Failed("Tautulli Error: Invalid url")
        if response["response"]["result"] != "success":
            raise Failed("Tautulli Error: {}".format(response["response"]["message"]))
        self.url = params["url"]
        self.apikey = params["apikey"]

    def get_popular(self, library, time_range=30, stats_count=20, stats_count_buffer=20, status_message=True):
        return self.get_items(library, time_range=time_range, stats_count=stats_count, list_type="popular", stats_count_buffer=stats_count_buffer, status_message=status_message)

    def get_top(self, library, time_range=30, stats_count=20, stats_count_buffer=20, status_message=True):
        return self.get_items(library, time_range=time_range, stats_count=stats_count, list_type="top", stats_count_buffer=stats_count_buffer, status_message=status_message)

    def get_items(self, library, time_range=30, stats_count=20, list_type="popular", stats_count_buffer=20, status_message=True):
        if status_message:
            logger.info("Processing Tautulli Most {}: {} {}".format("Popular" if list_type == "popular" else "Watched", stats_count, "Movies" if library.is_movie else "Shows"))
        response = self.send_request("{}/api/v2?apikey={}&cmd=get_home_stats&time_range={}&stats_count={}".format(self.url, self.apikey, time_range, int(stats_count) + int(stats_count_buffer)))
        stat_id = "{}_{}".format("popular" if list_type == "popular" else "top", "movies" if library.is_movie else "tv")

        items = None
        for entry in response["response"]["data"]:
            if entry["stat_id"] == stat_id:
                items = entry["rows"]
                break
        if items is None:
            raise Failed("Tautulli Error: No Items found in the response")

        section_id = self.get_section_id(library.name)
        rating_keys = []
        count = 0
        for item in items:
            if item["section_id"] == section_id and count < int(stats_count):
                rating_keys.append(item["rating_key"])
                count += 1
        return rating_keys

    def get_section_id(self, library_name):
        response = self.send_request("{}/api/v2?apikey={}&cmd=get_library_names".format(self.url, self.apikey))
        section_id = None
        for entry in response["response"]["data"]:
            if entry["section_name"] == library_name:
                section_id = entry["section_id"]
                break
        if section_id: return section_id
        else: raise Failed("Tautulli Error: No Library named {} in the response".format(library_name))

    @retry(stop_max_attempt_number=6, wait_fixed=10000)
    def send_request(self, url):
        logger.debug("Tautulli URL: {}".format(url.replace(self.apikey, "################################")))
        return requests.get(url).json()
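To make the home-stats filtering in get_items easier to follow, here is the same selection logic applied to a hand-written response fragment; the stat rows and section IDs are invented for the example.

# Hypothetical slice of a get_home_stats response (only the fields used above).
home_stats = [
    {"stat_id": "popular_movies", "rows": [
        {"section_id": 1, "rating_key": 101},
        {"section_id": 2, "rating_key": 202},
        {"section_id": 1, "rating_key": 103},
    ]},
    {"stat_id": "top_movies", "rows": []},
]

def pick_rating_keys(stats, stat_id, section_id, stats_count):
    # Same selection as TautulliAPI.get_items: find the matching stat block, then keep
    # rating keys belonging to the requested library section until the count is reached.
    rows = next((entry["rows"] for entry in stats if entry["stat_id"] == stat_id), [])
    keys = []
    for row in rows:
        if row["section_id"] == section_id and len(keys) < stats_count:
            keys.append(row["rating_key"])
    return keys

print(pick_rating_keys(home_stats, "popular_movies", 1, 20))  # -> [101, 103]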
417 modules/tests.py Normal file
@@ -0,0 +1,417 @@
import logging
|
||||||
|
from modules import util
|
||||||
|
from modules.config import Config
|
||||||
|
from modules.util import Failed
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
def run_tests(default_dir):
|
||||||
|
try:
|
||||||
|
config = Config(default_dir)
|
||||||
|
logger.info("")
|
||||||
|
util.seperator("Mapping Tests")
|
||||||
|
|
||||||
|
config.map_guids(config.libraries[0])
|
||||||
|
config.map_guids(config.libraries[1])
|
||||||
|
config.map_guids(config.libraries[2])
|
||||||
|
anidb_tests(config)
|
||||||
|
imdb_tests(config)
|
||||||
|
mal_tests(config)
|
||||||
|
tautulli_tests(config)
|
||||||
|
tmdb_tests(config)
|
||||||
|
trakt_tests(config)
|
||||||
|
tvdb_tests(config)
|
||||||
|
util.seperator("Finished All Plex Meta Manager Tests")
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
util.seperator("Canceled Plex Meta Manager Tests")
|
||||||
|
|
||||||
|
def anidb_tests(config):
|
||||||
|
if config.AniDB:
|
||||||
|
util.seperator("AniDB Tests")
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.convert_anidb_to_tvdb(69)
|
||||||
|
logger.info("Success | Convert AniDB to TVDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert AniDB to TVDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.convert_anidb_to_imdb(112)
|
||||||
|
logger.info("Success | Convert AniDB to IMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert AniDB to IMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.convert_tvdb_to_anidb(81797)
|
||||||
|
logger.info("Success | Convert TVDb to AniDB")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TVDb to AniDB: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.convert_imdb_to_anidb("tt0245429")
|
||||||
|
logger.info("Success | Convert IMDb to AniDB")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert IMDb to AniDB: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.get_items("anidb_id", 69, "en", status_message=False)
|
||||||
|
logger.info("Success | Get AniDB ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get AniDB ID: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.get_items("anidb_relation", 69, "en", status_message=False)
|
||||||
|
logger.info("Success | Get AniDB Relation")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get AniDB Relation: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.get_items("anidb_popular", 30, "en", status_message=False)
|
||||||
|
logger.info("Success | Get AniDB Popular")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get AniDB Popular: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.AniDB.validate_anidb_list(["69", "112"], "en")
|
||||||
|
logger.info("Success | Validate AniDB List")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Validate AniDB List: {}".format(e))
|
||||||
|
|
||||||
|
else:
|
||||||
|
util.seperator("AniDB Not Configured")
|
||||||
|
|
||||||
|
def imdb_tests(config):
|
||||||
|
if config.IMDb:
|
||||||
|
util.seperator("IMDb Tests")
|
||||||
|
|
||||||
|
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/search/title/?groups=top_1000", "limit": 0}, "en", status_message=False)
|
||||||
|
if len(tmdb_ids) == 1000: logger.info("Success | IMDb URL get TMDb IDs")
|
||||||
|
else: logger.error("Failure | IMDb URL get TMDb IDs: {} Should be 1000".format(len(tmdb_ids)))
|
||||||
|
|
||||||
|
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_list", {"url": "https://www.imdb.com/list/ls026173135/", "limit": 0}, "en", status_message=False)
|
||||||
|
if len(tmdb_ids) == 250: logger.info("Success | IMDb URL get TMDb IDs")
|
||||||
|
else: logger.error("Failure | IMDb URL get TMDb IDs: {} Should be 250".format(len(tmdb_ids)))
|
||||||
|
|
||||||
|
tmdb_ids, tvdb_ids = config.IMDb.get_items("imdb_id", "tt0814243", "en", status_message=False)
|
||||||
|
if len(tmdb_ids) == 1: logger.info("Success | IMDb ID get TMDb IDs")
|
||||||
|
else: logger.error("Failure | IMDb ID get TMDb IDs: {} Should be 1".format(len(tmdb_ids)))
|
||||||
|
|
||||||
|
else:
|
||||||
|
util.seperator("IMDb Not Configured")
|
||||||
|
|
||||||
|
def mal_tests(config):
|
||||||
|
if config.MyAnimeListIDList:
|
||||||
|
util.seperator("MyAnimeListXML Tests")
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.MyAnimeListIDList.convert_mal_to_tvdb(21)
|
||||||
|
logger.info("Success | Convert MyAnimeList to TVDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert MyAnimeList to TVDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.MyAnimeListIDList.convert_mal_to_tmdb(199)
|
||||||
|
logger.info("Success | Convert MyAnimeList to TMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert MyAnimeList to TMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.MyAnimeListIDList.convert_tvdb_to_mal(81797)
|
||||||
|
logger.info("Success | Convert TVDb to MyAnimeList")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TVDb to MyAnimeList: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.MyAnimeListIDList.convert_tmdb_to_mal(129)
|
||||||
|
logger.info("Success | Convert TMDb to MyAnimeList")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TMDb to MyAnimeList: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.MyAnimeListIDList.find_mal_ids(21)
|
||||||
|
logger.info("Success | Find MyAnimeList ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Find MyAnimeList ID: {}".format(e))
|
||||||
|
|
||||||
|
else:
|
||||||
|
util.seperator("MyAnimeListXML Not Configured")
|
||||||
|
|
||||||
|
if config.MyAnimeList:
|
||||||
|
util.seperator("MyAnimeList Tests")
|
||||||
|
|
||||||
|
mal_list_tests = [
|
||||||
|
("mal_all", 10),
|
||||||
|
("mal_airing", 10),
|
||||||
|
("mal_upcoming", 10),
|
||||||
|
("mal_tv", 10),
|
||||||
|
("mal_movie", 10),
|
||||||
|
("mal_ova", 10),
|
||||||
|
("mal_special", 10),
|
||||||
|
("mal_popular", 10),
|
||||||
|
("mal_favorite", 10),
|
||||||
|
("mal_suggested", 10),
|
||||||
|
("mal_userlist", {"limit": 10, "username": "@me", "status": "completed", "sort_by": "list_score"}),
|
||||||
|
("mal_season", {"limit": 10, "season": "fall", "year": 2020, "sort_by": "anime_score"})
|
||||||
|
]
|
||||||
|
|
||||||
|
for mal_list_test in mal_list_tests:
|
||||||
|
try:
|
||||||
|
config.MyAnimeList.get_items(mal_list_test[0], mal_list_test[1], status_message=False)
|
||||||
|
logger.info("Success | Get Anime using {}".format(util.pretty_names[mal_list_test[0]]))
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get Anime using {}: {}".format(util.pretty_names[mal_list_test[0]], e))
|
||||||
|
else:
|
||||||
|
util.seperator("MyAnimeList Not Configured")
|
||||||
|
|
||||||
|
def tautulli_tests(config):
|
||||||
|
if config.libraries[0].Tautulli:
|
||||||
|
util.seperator("Tautulli Tests")
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.libraries[0].Tautulli.get_section_id(config.libraries[0].name)
|
||||||
|
logger.info("Success | Get Section ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get Section ID: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.libraries[0].Tautulli.get_popular(config.libraries[0], status_message=False)
|
||||||
|
logger.info("Success | Get Popular")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get Popular: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.libraries[0].Tautulli.get_top(config.libraries[0], status_message=False)
|
||||||
|
logger.info("Success | Get Top")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get Top: {}".format(e))
|
||||||
|
else:
|
||||||
|
util.seperator("Tautulli Not Configured")
|
||||||
|
|
||||||
|
def tmdb_tests(config):
|
||||||
|
if config.TMDb:
|
||||||
|
util.seperator("TMDb Tests")
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TMDb.convert_imdb_to_tmdb("tt0076759")
|
||||||
|
logger.info("Success | Convert IMDb to TMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert IMDb to TMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TMDb.convert_tmdb_to_imdb(11)
|
||||||
|
logger.info("Success | Convert TMDb to IMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TMDb to IMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TMDb.convert_imdb_to_tvdb("tt0458290")
|
||||||
|
logger.info("Success | Convert IMDb to TVDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert IMDb to TVDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TMDb.convert_tvdb_to_imdb(83268)
|
||||||
|
logger.info("Success | Convert TVDb to IMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TVDb to IMDb: {}".format(e))
|
||||||
|
|
||||||
|
tmdb_list_tests = [
|
||||||
|
([11], "Movie"),
|
||||||
|
([4194], "Show"),
|
||||||
|
([10], "Collection"),
|
||||||
|
([1], "Person"),
|
||||||
|
([1], "Company"),
|
||||||
|
([2739], "Network"),
|
||||||
|
([8136], "List")
|
||||||
|
]
|
||||||
|
|
||||||
|
for tmdb_list_test in tmdb_list_tests:
|
||||||
|
try:
|
||||||
|
config.TMDb.validate_tmdb_list(tmdb_list_test[0], tmdb_type=tmdb_list_test[1])
|
||||||
|
logger.info("Success | Get TMDb {}".format(tmdb_list_test[1]))
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get TMDb {}: {}".format(tmdb_list_test[1], e))
|
||||||
|
|
||||||
|
tmdb_list_tests = [
|
||||||
|
("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, True),
|
||||||
|
("tmdb_discover", {"sort_by": "popularity.desc", "limit": 100}, False),
|
||||||
|
("tmdb_company", 1, True),
|
||||||
|
("tmdb_company", 1, False),
|
||||||
|
("tmdb_network", 2739, False),
|
||||||
|
("tmdb_keyword", 180547, True),
|
||||||
|
("tmdb_keyword", 180547, False),
|
||||||
|
("tmdb_now_playing", 10, True),
|
||||||
|
("tmdb_popular", 10, True),
|
||||||
|
("tmdb_popular", 10, False),
|
||||||
|
("tmdb_top_rated", 10, True),
|
||||||
|
("tmdb_top_rated", 10, False),
|
||||||
|
("tmdb_trending_daily", 10, True),
|
||||||
|
("tmdb_trending_daily", 10, False),
|
||||||
|
("tmdb_trending_weekly", 10, True),
|
||||||
|
("tmdb_trending_weekly", 10, False),
|
||||||
|
("tmdb_list", 7068209, True),
|
||||||
|
("tmdb_list", 7068209, False),
|
||||||
|
("tmdb_movie", 11, True),
|
||||||
|
("tmdb_collection", 10, True),
|
||||||
|
("tmdb_show", 4194, False)
|
||||||
|
]
|
||||||
|
|
||||||
|
for tmdb_list_test in tmdb_list_tests:
|
||||||
|
try:
|
||||||
|
config.TMDb.get_items(tmdb_list_test[0], tmdb_list_test[1], tmdb_list_test[2], status_message=False)
|
||||||
|
logger.info("Success | Get {} using {}".format("Movies" if tmdb_list_test[2] else "Shows", util.pretty_names[tmdb_list_test[0]]))
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get {} using {}: {}".format("Movies" if tmdb_list_test[2] else "Shows", util.pretty_names[tmdb_list_test[0]], e))
|
||||||
|
else:
|
||||||
|
util.seperator("TMDb Not Configured")
|
||||||
|
|
||||||
|
def trakt_tests(config):
|
||||||
|
if config.Trakt:
|
||||||
|
util.seperator("Trakt Tests")
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.convert_imdb_to_tmdb("tt0076759")
|
||||||
|
logger.info("Success | Convert IMDb to TMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert IMDb to TMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.convert_tmdb_to_imdb(11)
|
||||||
|
logger.info("Success | Convert TMDb to IMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TMDb to IMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.convert_imdb_to_tvdb("tt0458290")
|
||||||
|
logger.info("Success | Convert IMDb to TVDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert IMDb to TVDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.convert_tvdb_to_imdb(83268)
|
||||||
|
logger.info("Success | Convert TVDb to IMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TVDb to IMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.convert_tmdb_to_tvdb(11)
|
||||||
|
logger.info("Success | Convert TMDb to TVDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TMDb to TVDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.convert_tvdb_to_tmdb(83268)
|
||||||
|
logger.info("Success | Convert TVDb to TMDb")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Convert TVDb to TMDb: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.validate_trakt_list(["https://trakt.tv/users/movistapp/lists/christmas-movies"])
|
||||||
|
logger.info("Success | Get List")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get List: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.validate_trakt_watchlist(["me"], True)
|
||||||
|
logger.info("Success | Get Watchlist Movies")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get Watchlist Movies: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.Trakt.validate_trakt_watchlist(["me"], False)
|
||||||
|
logger.info("Success | Get Watchlist Shows")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get Watchlist Shows: {}".format(e))
|
||||||
|
|
||||||
|
trakt_list_tests = [
|
||||||
|
("trakt_list", "https://trakt.tv/users/movistapp/lists/christmas-movies", True),
|
||||||
|
("trakt_trending", 10, True),
|
||||||
|
("trakt_trending", 10, False),
|
||||||
|
("trakt_watchlist", "me", True),
|
||||||
|
("trakt_watchlist", "me", False)
|
||||||
|
]
|
||||||
|
|
||||||
|
for trakt_list_test in trakt_list_tests:
|
||||||
|
try:
|
||||||
|
config.Trakt.get_items(trakt_list_test[0], trakt_list_test[1], trakt_list_test[2], status_message=False)
|
||||||
|
logger.info("Success | Get {} using {}".format("Movies" if trakt_list_test[2] else "Shows", util.pretty_names[trakt_list_test[0]]))
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | Get {} using {}: {}".format("Movies" if trakt_list_test[2] else "Shows", util.pretty_names[trakt_list_test[0]], e))
|
||||||
|
else:
|
||||||
|
util.seperator("Trakt Not Configured")
|
||||||
|
|
||||||
|
def tvdb_tests(config):
|
||||||
|
if config.TVDb:
|
||||||
|
util.seperator("TVDb Tests")
|
||||||
|
|
||||||
|
tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/arrowverse", "en", status_message=False)
|
||||||
|
if len(tvdb_ids) == 10 and len(tmdb_ids) == 0: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs")
|
||||||
|
else: logger.error("Failure | TVDb URL get TVDb IDs and TMDb IDs: {} Should be 10 and {} Should be 0".format(len(tvdb_ids), len(tmdb_ids)))
|
||||||
|
|
||||||
|
tmdb_ids, tvdb_ids = config.TVDb.get_items("tvdb_list", "https://www.thetvdb.com/lists/6957", "en", status_message=False)
|
||||||
|
if len(tvdb_ids) == 4 and len(tmdb_ids) == 2: logger.info("Success | TVDb URL get TVDb IDs and TMDb IDs")
|
||||||
|
else: logger.error("Failure | TVDb URL get TVDb IDs and TMDb IDs: {} Should be 4 and {} Should be 2".format(len(tvdb_ids), len(tmdb_ids)))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TVDb.get_items("tvdb_show", "https://www.thetvdb.com/series/arrow", "en", status_message=False)
|
||||||
|
logger.info("Success | TVDb URL get TVDb Series ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | TVDb URL get TVDb Series ID: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TVDb.get_items("tvdb_show", 279121, "en", status_message=False)
|
||||||
|
logger.info("Success | TVDb ID get TVDb Series ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | TVDb ID get TVDb Series ID: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TVDb.get_items("tvdb_movie", "https://www.thetvdb.com/movies/the-lord-of-the-rings-the-fellowship-of-the-ring", "en", status_message=False)
|
||||||
|
logger.info("Success | TVDb URL get TVDb Movie ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | TVDb URL get TVDb Movie ID: {}".format(e))
|
||||||
|
|
||||||
|
try:
|
||||||
|
config.TVDb.get_items("tvdb_movie", 107, "en", status_message=False)
|
||||||
|
logger.info("Success | TVDb ID get TVDb Movie ID")
|
||||||
|
except Failed as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.error("Failure | TVDb ID get TVDb Movie ID: {}".format(e))
|
||||||
|
|
||||||
|
else:
|
||||||
|
util.seperator("TVDb Not Configured")
|
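tests.py repeats one pattern throughout: call a method, log Success when it returns, and log Failure plus a stack trace when it raises Failed. A generic wrapper along these lines captures that pattern; it is purely illustrative and not part of this commit.

import logging

logger = logging.getLogger("Plex Meta Manager")

def run_check(label, func, *args, **kwargs):
    # Illustrative stand-in for the try/except blocks used throughout tests.py.
    try:
        func(*args, **kwargs)
        logger.info("Success | {}".format(label))
    except Exception as e:  # tests.py catches the narrower modules.util.Failed
        logger.error("Failure | {}: {}".format(label, e))

# Example usage (assumes a configured Config object, as in run_tests):
# run_check("Convert IMDb to TMDb", config.TMDb.convert_imdb_to_tmdb, "tt0076759")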
225 modules/tmdb.py Normal file
@@ -0,0 +1,225 @@
import logging, os, tmdbv3api
|
||||||
|
from modules import util
|
||||||
|
from modules.util import Failed
|
||||||
|
from retrying import retry
|
||||||
|
from tmdbv3api.exceptions import TMDbException
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class TMDbAPI:
|
||||||
|
def __init__(self, params):
|
||||||
|
self.TMDb = tmdbv3api.TMDb()
|
||||||
|
self.TMDb.api_key = params["apikey"]
|
||||||
|
self.TMDb.language = params["language"]
|
||||||
|
response = tmdbv3api.Configuration().info()
|
||||||
|
if hasattr(response, "status_message"):
|
||||||
|
raise Failed("TMDb Error: {}".format(response.status_message))
|
||||||
|
self.apikey = params["apikey"]
|
||||||
|
self.language = params["language"]
|
||||||
|
self.Movie = tmdbv3api.Movie()
|
||||||
|
self.TV = tmdbv3api.TV()
|
||||||
|
self.Discover = tmdbv3api.Discover()
|
||||||
|
self.Trending = tmdbv3api.Trending()
|
||||||
|
self.Keyword = tmdbv3api.Keyword()
|
||||||
|
self.List = tmdbv3api.List()
|
||||||
|
self.Company = tmdbv3api.Company()
|
||||||
|
self.Network = tmdbv3api.Network()
|
||||||
|
self.Collection = tmdbv3api.Collection()
|
||||||
|
self.Person = tmdbv3api.Person()
|
||||||
|
self.image_url = "https://image.tmdb.org/t/p/original"
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def convert_from_tmdb(self, tmdb_id, convert_to, is_movie):
|
||||||
|
try: return self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to]
|
||||||
|
except TMDbException: raise Failed("TMDB Error: No {} found for TMDb ID {}".format(convert_to.upper().replace("B_", "b "), tmdb_id))
|
||||||
|
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def convert_to_tmdb(self, external_id, external_source, is_movie):
|
||||||
|
search_results = self.Movie.external(external_id=external_id, external_source=external_source)
|
||||||
|
search = search_results["movie_results" if is_movie else "tv_results"]
|
||||||
|
if len(search) == 1: return search[0]["id"]
|
||||||
|
else: raise Failed("TMDB Error: No TMDb ID found for {} {}".format(external_source.upper().replace("B_", "b "), external_id))
|
||||||
|
|
||||||
|
def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_from_tmdb(tmdb_id, "imdb_id", is_movie)
|
||||||
|
def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_to_tmdb(imdb_id, "imdb_id", is_movie)
|
||||||
|
def convert_tmdb_to_tvdb(self, tmdb_id): return self.convert_from_tmdb(tmdb_id, "tvdb_id", False)
|
||||||
|
def convert_tvdb_to_tmdb(self, tvdb_id): return self.convert_to_tmdb(tvdb_id, "tvdb_id", False)
|
||||||
|
def convert_tvdb_to_imdb(self, tvdb_id): return self.convert_tmdb_to_imdb(self.convert_tvdb_to_tmdb(tvdb_id), False)
|
||||||
|
def convert_imdb_to_tvdb(self, imdb_id): return self.convert_tmdb_to_tvdb(self.convert_imdb_to_tmdb(imdb_id, False))
|
||||||
|
|
||||||
|
def get_movie_show_or_collection(self, tmdb_id, is_movie):
|
||||||
|
if is_movie:
|
||||||
|
try: return self.get_collection(tmdb_id)
|
||||||
|
except Failed:
|
||||||
|
try: return self.get_movie(tmdb_id)
|
||||||
|
except Failed: raise Failed("TMDb Error: No Movie or Collection found for TMDb ID {}".format(tmdb_id))
|
||||||
|
else: return self.get_show(tmdb_id)
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_movie(self, tmdb_id):
|
||||||
|
try: return self.Movie.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Movie found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_show(self, tmdb_id):
|
||||||
|
try: return self.TV.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Show found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_collection(self, tmdb_id):
|
||||||
|
try: return self.Collection.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Collection found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_person(self, tmdb_id):
|
||||||
|
try: return self.Person.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Person found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_company(self, tmdb_id):
|
||||||
|
try: return self.Company.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Company found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_network(self, tmdb_id):
|
||||||
|
try: return self.Network.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Network found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_keyword(self, tmdb_id):
|
||||||
|
try: return self.Keyword.details(tmdb_id)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No Keyword found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def get_list(self, tmdb_id):
|
||||||
|
try: return self.List.details(tmdb_id, all_details=True)
|
||||||
|
except TMDbException as e: raise Failed("TMDb Error: No List found for TMDb ID {}: {}".format(tmdb_id, e))
|
||||||
|
|
||||||
|
def get_pagenation(self, method, amount, is_movie):
|
||||||
|
ids = []
|
||||||
|
count = 0
|
||||||
|
for x in range(int(amount / 20) + 1):
|
||||||
|
if method == "tmdb_popular": tmdb_items = self.Movie.popular(x + 1) if is_movie else self.TV.popular(x + 1)
|
||||||
|
elif method == "tmdb_top_rated": tmdb_items = self.Movie.top_rated(x + 1) if is_movie else self.TV.top_rated(x + 1)
|
||||||
|
elif method == "tmdb_now_playing" and is_movie: tmdb_items = self.Movie.now_playing(x + 1)
|
||||||
|
elif method == "tmdb_trending_daily": tmdb_items = self.Trending.movie_day(x + 1) if is_movie else self.Trending.tv_day(x + 1)
|
||||||
|
elif method == "tmdb_trending_weekly": tmdb_items = self.Trending.movie_week(x + 1) if is_movie else self.Trending.tv_week(x + 1)
|
||||||
|
for tmdb_item in tmdb_items:
|
||||||
|
try:
|
||||||
|
ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id))
|
||||||
|
count += 1
|
||||||
|
except Failed:
|
||||||
|
pass
|
||||||
|
if count == amount: break
|
||||||
|
if count == amount: break
|
||||||
|
return ids
|
||||||
|
|
||||||
|
def get_discover(self, attrs, amount, is_movie):
|
||||||
|
ids = []
|
||||||
|
count = 0
|
||||||
|
self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
|
||||||
|
total_pages = int(self.TMDb.total_pages)
|
||||||
|
total_results = int(self.TMDb.total_results)
|
||||||
|
amount = total_results if amount == 0 or total_results < amount else amount
|
||||||
|
for x in range(total_pages):
|
||||||
|
attrs["page"] = x + 1
|
||||||
|
tmdb_items = self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
|
||||||
|
for tmdb_item in tmdb_items:
|
||||||
|
try:
|
||||||
|
ids.append(tmdb_item.id if is_movie else self.convert_tmdb_to_tvdb(tmdb_item.id))
|
||||||
|
count += 1
|
||||||
|
except Failed:
|
||||||
|
pass
|
||||||
|
if count == amount: break
|
||||||
|
if count == amount: break
|
||||||
|
return ids, amount
|
||||||
|
|
||||||
|
def get_items(self, method, data, is_movie, status_message=True):
|
||||||
|
if status_message:
|
||||||
|
logger.debug("Data: {}".format(data))
|
||||||
|
pretty = util.pretty_names[method] if method in util.pretty_names else method
|
||||||
|
media_type = "Movie" if is_movie else "Show"
|
||||||
|
movie_ids = []
|
||||||
|
show_ids = []
|
||||||
|
if method in ["tmdb_discover", "tmdb_company", "tmdb_keyword"] or (method == "tmdb_network" and not is_movie):
|
||||||
|
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
|
||||||
|
tmdb_id = int(data)
|
||||||
|
if method == "tmdb_company":
|
||||||
|
tmdb_name = str(self.get_company(tmdb_id))
|
||||||
|
attrs = {"with_companies": tmdb_id}
|
||||||
|
elif method == "tmdb_network":
|
||||||
|
tmdb_name = str(self.get_network(tmdb_id))
|
||||||
|
attrs = {"with_networks": tmdb_id}
|
||||||
|
elif method == "tmdb_keyword":
|
||||||
|
tmdb_name = str(self.get_keyword(tmdb_id))
|
||||||
|
attrs = {"with_keywords": tmdb_id}
|
||||||
|
limit = 0
|
||||||
|
else:
|
||||||
|
attrs = data.copy()
|
||||||
|
limit = int(attrs.pop("limit"))
|
||||||
|
if is_movie: movie_ids, amount = self.get_discover(attrs, limit, is_movie)
|
||||||
|
else: show_ids, amount = self.get_discover(attrs, limit, is_movie)
|
||||||
|
if status_message:
|
||||||
|
if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
|
||||||
|
logger.info("Processing {}: ({}) {} ({} {}{})".format(pretty, tmdb_id, tmdb_name, amount, media_type, "" if amount == 1 else "s"))
|
||||||
|
else:
|
||||||
|
logger.info("Processing {}: {} {}{}".format(pretty, amount, media_type, "" if amount == 1 else "s"))
|
||||||
|
for attr, value in attrs.items():
|
||||||
|
logger.info(" {}: {}".format(attr, value))
|
||||||
|
elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]:
|
||||||
|
if is_movie: movie_ids = self.get_pagenation(method, data, is_movie)
|
||||||
|
else: show_ids = self.get_pagenation(method, data, is_movie)
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} {}{}".format(pretty, data, media_type, "" if data == 1 else "s"))
|
||||||
|
else:
|
||||||
|
tmdb_id = int(data)
|
||||||
|
if method == "tmdb_list":
|
||||||
|
tmdb_list = self.get_list(tmdb_id)
|
||||||
|
tmdb_name = tmdb_list.name
|
||||||
|
for tmdb_item in tmdb_list.items:
|
||||||
|
if tmdb_item.media_type == "movie":
|
||||||
|
movie_ids.append(tmdb_item.id)
|
||||||
|
elif tmdb_item.media_type == "tv":
|
||||||
|
try: show_ids.append(self.convert_tmdb_to_tvdb(tmdb_item.id))
|
||||||
|
except Failed: pass
|
||||||
|
elif method == "tmdb_movie":
|
||||||
|
tmdb_name = str(self.get_movie(tmdb_id).title)
|
||||||
|
movie_ids.append(tmdb_id)
|
||||||
|
elif method == "tmdb_collection":
|
||||||
|
tmdb_items = self.get_collection(tmdb_id)
|
||||||
|
tmdb_name = str(tmdb_items.name)
|
||||||
|
for tmdb_item in tmdb_items.parts:
|
||||||
|
movie_ids.append(tmdb_item["id"])
|
||||||
|
elif method == "tmdb_show":
|
||||||
|
tmdb_name = str(self.get_show(tmdb_id).name)
|
||||||
|
try: show_ids.append(self.convert_tmdb_to_tvdb(tmdb_id))
|
||||||
|
except Failed: pass
|
||||||
|
else:
|
||||||
|
raise Failed("TMDb Error: Method {} not supported".format(method))
|
||||||
|
if status_message and len(movie_ids) > 0:
|
||||||
|
logger.info("Processing {}: ({}) {} ({} Movie{})".format(pretty, tmdb_id, tmdb_name, len(movie_ids), "" if len(movie_ids) == 1 else "s"))
|
||||||
|
if status_message and len(show_ids) > 0:
|
||||||
|
logger.info("Processing {}: ({}) {} ({} Show{})".format(pretty, tmdb_id, tmdb_name, len(show_ids), "" if len(show_ids) == 1 else "s"))
|
||||||
|
if status_message:
|
||||||
|
logger.debug("TMDb IDs Found: {}".format(movie_ids))
|
||||||
|
logger.debug("TVDb IDs Found: {}".format(show_ids))
|
||||||
|
return movie_ids, show_ids
|
||||||
|
|
||||||
|
def validate_tmdb_list(self, tmdb_list, tmdb_type):
|
||||||
|
tmdb_values = []
|
||||||
|
for tmdb_id in tmdb_list:
|
||||||
|
try:
|
||||||
|
if tmdb_type == "Movie": self.get_movie(tmdb_id)
|
||||||
|
elif tmdb_type == "Show": self.get_show(tmdb_id)
|
||||||
|
elif tmdb_type == "Collection": self.get_collection(tmdb_id)
|
||||||
|
elif tmdb_type == "Person": self.get_person(tmdb_id)
|
||||||
|
elif tmdb_type == "Company": self.get_company(tmdb_id)
|
||||||
|
elif tmdb_type == "Network": self.get_network(tmdb_id)
|
||||||
|
elif tmdb_type == "List": self.get_list(tmdb_id)
|
||||||
|
tmdb_values.append(tmdb_id)
|
||||||
|
except Failed as e:
|
||||||
|
logger.error(e)
|
||||||
|
if len(tmdb_values) == 0:
|
||||||
|
raise Failed("TMDb Error: No valid TMDb IDs in {}".format(tmdb_list))
|
||||||
|
return tmdb_values
|
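The paging loop in get_pagenation requests int(amount / 20) + 1 pages of 20 results and stops as soon as amount IDs have been collected; here is a quick arithmetic check of that page count, independent of the TMDb API.

def pages_needed(amount, page_size=20):
    # Same arithmetic as TMDbAPI.get_pagenation: one extra page covers any remainder,
    # and exact multiples simply stop early via the count == amount check.
    return int(amount / page_size) + 1

for amount in (10, 20, 45, 100):
    print(amount, "->", pages_needed(amount), "page(s) requested at most")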
162 modules/trakt.py Normal file
@@ -0,0 +1,162 @@
import logging, webbrowser
|
||||||
|
from modules import util
|
||||||
|
from modules.util import Failed, TimeoutExpired
|
||||||
|
from retrying import retry
|
||||||
|
from ruamel import yaml
|
||||||
|
from trakt import Trakt
|
||||||
|
from trakt.objects.episode import Episode
|
||||||
|
from trakt.objects.movie import Movie
|
||||||
|
from trakt.objects.season import Season
|
||||||
|
from trakt.objects.show import Show
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class TraktAPI:
|
||||||
|
def __init__(self, params, authorization=None):
|
||||||
|
self.redirect_uri = "urn:ietf:wg:oauth:2.0:oob"
|
||||||
|
self.aliases = {
|
||||||
|
"trakt_trending": "Trakt Trending",
|
||||||
|
"trakt_watchlist": "Trakt Watchlist",
|
||||||
|
"trakt_list": "Trakt List"
|
||||||
|
}
|
||||||
|
self.client_id = params["client_id"]
|
||||||
|
self.client_secret = params["client_secret"]
|
||||||
|
self.config_path = params["config_path"]
|
||||||
|
self.authorization = authorization
|
||||||
|
Trakt.configuration.defaults.client(self.client_id, self.client_secret)
|
||||||
|
if not self.save_authorization(self.authorization):
|
||||||
|
if not self.refresh_authorization():
|
||||||
|
self.get_authorization()
|
||||||
|
|
||||||
|
def get_authorization(self):
|
||||||
|
url = Trakt["oauth"].authorize_url(self.redirect_uri)
|
||||||
|
logger.info("Navigate to: {}".format(url))
|
||||||
|
logger.info("If you get an OAuth error your client_id or client_secret is invalid")
|
||||||
|
webbrowser.open(url, new=2)
|
||||||
|
try: pin = util.logger_input("Trakt pin (case insensitive)", timeout=300).strip()
|
||||||
|
except TimeoutExpired: raise Failed("Input Timeout: Trakt pin required.")
|
||||||
|
if not pin: raise Failed("Trakt Error: No input Trakt pin required.")
|
||||||
|
new_authorization = Trakt["oauth"].token(pin, self.redirect_uri)
|
||||||
|
if not new_authorization:
|
||||||
|
raise Failed("Trakt Error: Invalid trakt pin. If you're sure you typed it in correctly your client_id or client_secret may be invalid")
|
||||||
|
if not self.save_authorization(new_authorization):
|
||||||
|
raise Failed("Trakt Error: New Authorization Failed")
|
||||||
|
|
||||||
|
def check_authorization(self, authorization):
|
||||||
|
try:
|
||||||
|
with Trakt.configuration.oauth.from_response(authorization, refresh=True):
|
||||||
|
if Trakt["users/settings"].get():
|
||||||
|
return True
|
||||||
|
except ValueError: pass
|
||||||
|
return False
|
||||||
|
|
||||||
|
def refresh_authorization(self):
|
||||||
|
if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]:
|
||||||
|
logger.info("Refreshing Access Token...")
|
||||||
|
refreshed_authorization = Trakt["oauth"].token_refresh(self.authorization["refresh_token"], self.redirect_uri)
|
||||||
|
return self.save_authorization(refreshed_authorization)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def save_authorization(self, authorization):
|
||||||
|
if authorization and self.check_authorization(authorization):
|
||||||
|
if self.authorization != authorization:
|
||||||
|
yaml.YAML().allow_duplicate_keys = True
|
||||||
|
config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
|
||||||
|
config["trakt"]["authorization"] = {
|
||||||
|
"access_token": authorization["access_token"],
|
||||||
|
"token_type": authorization["token_type"],
|
||||||
|
"expires_in": authorization["expires_in"],
|
||||||
|
"refresh_token": authorization["refresh_token"],
|
||||||
|
"scope": authorization["scope"],
|
||||||
|
"created_at": authorization["created_at"]
|
||||||
|
}
|
||||||
|
logger.info("Saving authorization information to {}".format(self.config_path))
|
||||||
|
yaml.round_trip_dump(config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
|
||||||
|
self.authorization = authorization
|
||||||
|
Trakt.configuration.defaults.oauth.from_response(self.authorization)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def convert_tmdb_to_imdb(self, tmdb_id, is_movie=True): return self.convert_id(tmdb_id, "tmdb", "imdb", "movie" if is_movie else "show")
|
||||||
|
def convert_imdb_to_tmdb(self, imdb_id, is_movie=True): return self.convert_id(imdb_id, "imdb", "tmdb", "movie" if is_movie else "show")
|
||||||
|
def convert_tmdb_to_tvdb(self, tmdb_id): return self.convert_id(tmdb_id, "tmdb", "tvdb", "show")
|
||||||
|
def convert_tvdb_to_tmdb(self, tvdb_id): return self.convert_id(tvdb_id, "tvdb", "tmdb", "show")
|
||||||
|
def convert_tvdb_to_imdb(self, tvdb_id): return self.convert_id(tvdb_id, "tvdb", "imdb", "show")
|
||||||
|
def convert_imdb_to_tvdb(self, imdb_id): return self.convert_id(imdb_id, "imdb", "tvdb", "show")
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def convert_id(self, external_id, from_source, to_source, media_type):
|
||||||
|
lookup = Trakt["search"].lookup(external_id, from_source, media_type)
|
||||||
|
if lookup:
|
||||||
|
lookup = lookup[0] if isinstance(lookup, list) else lookup
|
||||||
|
return lookup.get_key(to_source)
|
||||||
|
else:
|
||||||
|
raise Failed("No {} ID found for {} ID {}".format(to_source.upper().replace("B", "b"), from_source.upper().replace("B", "b"), external_id))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def trending(self, amount, is_movie):
|
||||||
|
return Trakt["movies" if is_movie else "shows"].trending(per_page=amount)
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def watchlist(self, data, is_movie):
|
||||||
|
items = Trakt["users/{}/watchlist".format(data)].movies() if is_movie else Trakt["users/{}/watchlist".format(data)].shows()
|
||||||
|
if items is None: raise Failed("Trakt Error: No List found")
|
||||||
|
else: return [i for i in items]
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
|
||||||
|
def standard_list(self, data):
|
||||||
|
try: items = Trakt[urlparse(data).path].items()
|
||||||
|
except AttributeError: items = None
|
||||||
|
if items is None: raise Failed("Trakt Error: No List found")
|
||||||
|
else: return items
|
||||||
|
|
||||||
|
def validate_trakt_list(self, values):
|
||||||
|
trakt_values = []
|
||||||
|
for value in values:
|
||||||
|
try:
|
||||||
|
self.standard_list(value)
|
||||||
|
trakt_values.append(value)
|
||||||
|
except Failed as e:
|
||||||
|
logger.error(e)
|
||||||
|
if len(trakt_values) == 0:
|
||||||
|
raise Failed("Trakt Error: No valid Trakt Lists in {}".format(value))
|
||||||
|
return trakt_values
|
||||||
|
|
||||||
|
def validate_trakt_watchlist(self, values, is_movie):
|
||||||
|
trakt_values = []
|
||||||
|
for value in values:
|
||||||
|
try:
|
||||||
|
self.watchlist(value, is_movie)
|
||||||
|
trakt_values.append(value)
|
||||||
|
except Failed as e:
|
||||||
|
logger.error(e)
|
||||||
|
if len(trakt_values) == 0:
|
||||||
|
raise Failed("Trakt Error: No valid Trakt Watchlists in {}".format(value))
|
||||||
|
return trakt_values
|
||||||
|
|
||||||
|
def get_items(self, method, data, is_movie, status_message=True):
|
||||||
|
if status_message:
|
||||||
|
logger.debug("Data: {}".format(data))
|
||||||
|
pretty = self.aliases[method] if method in self.aliases else method
|
||||||
|
media_type = "Movie" if is_movie else "Show"
|
||||||
|
if method == "trakt_trending":
|
||||||
|
trakt_items = self.trending(int(data), is_movie)
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {} {}{}".format(pretty, data, media_type, "" if data == 1 else "s"))
|
||||||
|
else:
|
||||||
|
if method == "trakt_watchlist": trakt_items = self.watchlist(data, is_movie)
|
||||||
|
elif method == "trakt_list": trakt_items = self.standard_list(data)
|
||||||
|
else: raise Failed("Trakt Error: Method {} not supported".format(method))
|
||||||
|
if status_message: logger.info("Processing {}: {}".format(pretty, data))
|
||||||
|
show_ids = []
|
||||||
|
movie_ids = []
|
||||||
|
for trakt_item in trakt_items:
|
||||||
|
if isinstance(trakt_item, Movie): movie_ids.append(int(trakt_item.get_key("tmdb")))
|
||||||
|
elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids: show_ids.append(int(trakt_item.pk[1]))
|
||||||
|
elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids: show_ids.append(int(trakt_item.show.pk[1]))
|
||||||
|
if status_message:
|
||||||
|
logger.debug("Trakt {} Found: {}".format(media_type, trakt_items))
|
||||||
|
logger.debug("TMDb IDs Found: {}".format(movie_ids))
|
||||||
|
logger.debug("TVDb IDs Found: {}".format(show_ids))
|
||||||
|
return movie_ids, show_ids
|
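save_authorization writes the token block back under trakt.authorization in the config file; the sketch below shows that shape with placeholder token values (the real values come from Trakt's OAuth response).

import sys
from ruamel import yaml

# Placeholder values only; in the module above these fields come from Trakt["oauth"].token().
authorization = {
    "access_token": "abc123",
    "token_type": "Bearer",
    "expires_in": 7776000,
    "refresh_token": "def456",
    "scope": "public",
    "created_at": 1609459200,
}
config = {"trakt": {"authorization": authorization}}
yaml.round_trip_dump(config, sys.stdout)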
165
modules/tvdb.py
Normal file
165
modules/tvdb.py
Normal file
|
@ -0,0 +1,165 @@
|
||||||
|
import logging, math, re, requests, time
|
||||||
|
from lxml import html
|
||||||
|
from modules import util
|
||||||
|
from modules.util import Failed
|
||||||
|
from retrying import retry
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class TVDbObj:
|
||||||
|
def __init__(self, tvdb_url, language, is_movie, TVDb):
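# Scrape the TheTVDB page with XPath to extract the TVDb ID, English title, poster, and, for movies, a TMDb ID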
|
||||||
|
tvdb_url = tvdb_url.strip()
|
||||||
|
if not is_movie and tvdb_url.startswith((TVDb.series_url, TVDb.alt_series_url, TVDb.series_id_url)):
|
||||||
|
self.media_type = "Series"
|
||||||
|
elif is_movie and tvdb_url.startswith((TVDb.movies_url, TVDb.alt_movies_url, TVDb.movie_id_url)):
|
||||||
|
self.media_type = "Movie"
|
||||||
|
else:
|
||||||
|
raise Failed("TVDb Error: {} must begin with {}".format(tvdb_url, TVDb.movies_url if is_movie else TVDb.series_url))
|
||||||
|
|
||||||
|
response = TVDb.send_request(tvdb_url, language)
|
||||||
|
results = html.fromstring(response).xpath("//*[text()='TheTVDB.com {} ID']/parent::node()/span/text()".format(self.media_type))
|
||||||
|
if len(results) > 0:
|
||||||
|
self.id = int(results[0])
|
||||||
|
else:
|
||||||
|
raise Failed("TVDb Error: Could not find a TVDb {} ID at the URL {}".format(self.media_type, tvdb_url))
|
||||||
|
|
||||||
|
results = html.fromstring(response).xpath("//div[@class='change_translation_text' and @data-language='eng']/@data-title")
|
||||||
|
if len(results) > 0 and len(results[0]) > 0:
|
||||||
|
self.title = results[0]
|
||||||
|
else:
|
||||||
|
raise Failed("TVDb Error: Name not found from TVDb URL: {}".format(tvdb_url))
|
||||||
|
|
||||||
|
results = html.fromstring(response).xpath("//div[@class='row hidden-xs hidden-sm']/div/img/@src")
|
||||||
|
self.poster_path = results[0] if len(results) > 0 and len(results[0]) > 0 else None
|
||||||
|
|
||||||
|
tmdb_id = None
|
||||||
|
if is_movie:
|
||||||
|
results = html.fromstring(response).xpath("//*[text()='TheMovieDB.com']/@href")
|
||||||
|
if len(results) > 0:
|
||||||
|
try: tmdb_id = util.regex_first_int(results[0], "TMDb ID")
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
if not tmdb_id:
|
||||||
|
results = html.fromstring(response).xpath("//*[text()='IMDB']/@href")
|
||||||
|
if len(results) > 0:
|
||||||
|
try: tmdb_id = TVDb.convert_from_imdb(util.get_id_from_imdb_url(results[0]), language)
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
self.tmdb_id = tmdb_id
|
||||||
|
self.tvdb_url = tvdb_url
|
||||||
|
self.language = language
|
||||||
|
self.is_movie = is_movie
|
||||||
|
self.TVDb = TVDb
|
||||||
|
|
||||||
|
class TVDbAPI:
|
||||||
|
def __init__(self, Cache=None, TMDb=None, Trakt=None):
|
||||||
|
self.Cache = Cache
|
||||||
|
self.TMDb = TMDb
|
||||||
|
self.Trakt = Trakt
|
||||||
|
self.site_url = "https://www.thetvdb.com"
|
||||||
|
self.alt_site_url = "https://thetvdb.com"
|
||||||
|
self.list_url = "{}/lists/".format(self.site_url)
|
||||||
|
self.alt_list_url = "{}/lists/".format(self.alt_site_url)
|
||||||
|
self.series_url = "{}/series/".format(self.site_url)
|
||||||
|
self.alt_series_url = "{}/series/".format(self.alt_site_url)
|
||||||
|
self.movies_url = "{}/movies/".format(self.site_url)
|
||||||
|
self.alt_movies_url = "{}/movies/".format(self.alt_site_url)
|
||||||
|
self.series_id_url = "{}/dereferrer/series/".format(self.site_url)
|
||||||
|
self.movie_id_url = "{}/dereferrer/movie/".format(self.site_url)
|
||||||
|
|
||||||
|
def get_series(self, language, tvdb_url=None, tvdb_id=None):
|
||||||
|
if not tvdb_url and not tvdb_id:
|
||||||
|
raise Failed("TVDB Error: getget_seriesmove requires either tvdb_url or tvdb_id")
|
||||||
|
elif not tvdb_url and tvdb_id:
|
||||||
|
tvdb_url = "{}{}".format(self.series_id_url, tvdb_id)
|
||||||
|
return TVDbObj(tvdb_url, language, False, self)
|
||||||
|
|
||||||
|
def get_movie(self, language, tvdb_url=None, tvdb_id=None):
|
||||||
|
if not tvdb_url and not tvdb_id:
|
||||||
|
raise Failed("TVDB Error: get_movie requires either tvdb_url or tvdb_id")
|
||||||
|
elif not tvdb_url and tvdb_id:
|
||||||
|
tvdb_url = "{}{}".format(self.movie_id_url, tvdb_id)
|
||||||
|
return TVDbObj(tvdb_url, language, True, self)
|
||||||
|
|
||||||
|
def get_tvdb_ids_from_url(self, tvdb_url, language):
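# Walk every row of a TheTVDB list page, collecting TVDb series IDs and TMDb IDs for movies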
|
||||||
|
show_ids = []
|
||||||
|
movie_ids = []
|
||||||
|
tvdb_url = tvdb_url.strip()
|
||||||
|
if tvdb_url.startswith((self.list_url, self.alt_list_url)):
|
||||||
|
try:
|
||||||
|
response = self.send_request(tvdb_url, language)
|
||||||
|
items = html.fromstring(response).xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']")
|
||||||
|
for item in items:
|
||||||
|
title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0]
|
||||||
|
item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0]
|
||||||
|
if item_url.startswith("/series/"):
|
||||||
|
try: show_ids.append(self.get_series(language, tvdb_url="{}{}".format(self.site_url, item_url)).id)
|
||||||
|
except Failed as e: logger.error("{} for series {}".format(e, title))
|
||||||
|
elif item_url.startswith("/movies/"):
|
||||||
|
try:
|
||||||
|
tmdb_id = self.get_movie(language, tvdb_url="{}{}".format(self.site_url, item_url)).tmdb_id
|
||||||
|
if tmdb_id: movie_ids.append(tmdb_id)
|
||||||
|
else: raise Failed("TVDb Error: TMDb ID not found from TVDb URL: {}".format(tvdb_url))
|
||||||
|
except Failed as e:
|
||||||
|
logger.error("{} for series {}".format(e, title))
|
||||||
|
else:
|
||||||
|
logger.error("TVDb Error: Skipping Movie: {}".format(title))
|
||||||
|
if len(show_ids) > 0 or len(movie_ids) > 0:
|
||||||
|
return movie_ids, show_ids
|
||||||
|
raise Failed("TVDb Error: No TVDb IDs found at {}".format(tvdb_url))
|
||||||
|
except requests.exceptions.MissingSchema as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
raise Failed("TVDb Error: URL Lookup Failed for {}".format(tvdb_url))
|
||||||
|
else:
|
||||||
|
raise Failed("TVDb Error: {} must begin with {}".format(tvdb_url, self.list_url))
|
||||||
|
|
||||||
|
@retry(stop_max_attempt_number=6, wait_fixed=10000)
|
||||||
|
def send_request(self, url, language):
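# Simple retried GET of a TheTVDB page; the Accept-Language header selects which translation is returned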
|
||||||
|
return requests.get(url, headers={"Accept-Language": language}).content
|
||||||
|
|
||||||
|
def get_items(self, method, data, language, status_message=True):
|
||||||
|
pretty = util.pretty_names[method] if method in util.pretty_names else method
|
||||||
|
show_ids = []
|
||||||
|
movie_ids = []
|
||||||
|
if status_message:
|
||||||
|
logger.info("Processing {}: {}".format(pretty, data))
|
||||||
|
if method == "tvdb_show":
|
||||||
|
try: show_ids.append(self.get_series(language, tvdb_id=int(data)))
|
||||||
|
except ValueError: show_ids.append(self.get_series(language, tvdb_url=data))
|
||||||
|
elif method == "tvdb_movie":
|
||||||
|
try: movie_ids.append(self.get_movie(language, tvdb_id=int(data)))
|
||||||
|
except ValueError: movie_ids.append(self.get_movie(language, tvdb_url=data))
|
||||||
|
elif method == "tvdb_list":
|
||||||
|
tmdb_ids, tvdb_ids = self.get_tvdb_ids_from_url(data, language)
|
||||||
|
movie_ids.extend(tmdb_ids)
|
||||||
|
show_ids.extend(tvdb_ids)
|
||||||
|
else:
|
||||||
|
raise Failed("TVDb Error: Method {} not supported".format(method))
|
||||||
|
if status_message:
|
||||||
|
logger.debug("TMDb IDs Found: {}".format(movie_ids))
|
||||||
|
logger.debug("TVDb IDs Found: {}".format(show_ids))
|
||||||
|
return movie_ids, show_ids
|
||||||
|
|
||||||
|
def convert_from_imdb(self, imdb_id, language):
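# Convert an IMDb ID to a TMDb ID, checking the cache first and then falling back to TMDb and Trakt lookups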
|
||||||
|
if self.Cache:
|
||||||
|
tmdb_id, tvdb_id = self.Cache.get_ids_from_imdb(imdb_id)
|
||||||
|
update = False
|
||||||
|
if not tmdb_id:
|
||||||
|
tmdb_id, update = self.Cache.get_tmdb_from_imdb(imdb_id)
|
||||||
|
if update:
|
||||||
|
tmdb_id = None
|
||||||
|
else:
|
||||||
|
tmdb_id = None
|
||||||
|
from_cache = tmdb_id is not None
|
||||||
|
|
||||||
|
if not tmdb_id and self.TMDb:
|
||||||
|
try: tmdb_id = self.TMDb.convert_imdb_to_tmdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
if not tmdb_id and self.Trakt:
|
||||||
|
try: tmdb_id = self.Trakt.convert_imdb_to_tmdb(imdb_id)
|
||||||
|
except Failed: pass
|
||||||
|
try:
|
||||||
|
if tmdb_id and not from_cache: self.TMDb.get_movie(tmdb_id)
|
||||||
|
except Failed: tmdb_id = None
|
||||||
|
if not tmdb_id: raise Failed("TVDb Error: No TMDb ID found for IMDb: {}".format(imdb_id))
|
||||||
|
if self.Cache and tmdb_id and update is not False:
|
||||||
|
self.Cache.update_imdb("movie", update, imdb_id, tmdb_id)
|
||||||
|
return tmdb_id
|
618
modules/util.py
Normal file
|
@ -0,0 +1,618 @@
|
||||||
|
import datetime, logging, re, signal, sys, time, traceback
|
||||||
|
|
||||||
|
try:
|
||||||
|
import msvcrt
|
||||||
|
windows = True
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
windows = False
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
|
||||||
|
class TimeoutExpired(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
class Failed(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def retry_if_not_failed(exception):
|
||||||
|
return not isinstance(exception, Failed)
|
||||||
|
|
||||||
|
seperating_character = "="
|
||||||
|
screen_width = 100
|
||||||
|
|
||||||
|
method_alias = {
|
||||||
|
"actors": "actor", "role": "actor", "roles": "actor",
|
||||||
|
"content_ratings": "content_rating", "contentRating": "content_rating", "contentRatings": "content_rating",
|
||||||
|
"countries": "country",
|
||||||
|
"decades": "decade",
|
||||||
|
"directors": "director",
|
||||||
|
"genres": "genre",
|
||||||
|
"studios": "studio", "network": "studio", "networks": "studio",
|
||||||
|
"writers": "writer",
|
||||||
|
"years": "year"
|
||||||
|
}
|
||||||
|
filter_alias = {
|
||||||
|
"actor": "actors",
|
||||||
|
"audio_language": "audio_language",
|
||||||
|
"collection": "collections",
|
||||||
|
"content_rating": "contentRating",
|
||||||
|
"country": "countries",
|
||||||
|
"director": "directors",
|
||||||
|
"genre": "genres",
|
||||||
|
"max_age": "max_age",
|
||||||
|
"originally_available": "originallyAvailableAt",
|
||||||
|
"rating": "rating",
|
||||||
|
"studio": "studio",
|
||||||
|
"subtitle_language": "subtitle_language",
|
||||||
|
"writer": "writers",
|
||||||
|
"video_resolution": "video_resolution",
|
||||||
|
"year": "year"
|
||||||
|
}
|
||||||
|
days_alias = {
|
||||||
|
"monday": 0, "mon": 0, "m": 0,
|
||||||
|
"tuesday": 1, "tues": 1, "tue": 1, "tu": 1, "t": 1,
|
||||||
|
"wednesday": 2, "wed": 2, "w": 2,
|
||||||
|
"thursday": 3, "thurs": 3, "thur": 3, "thu": 3, "th": 3, "r": 3,
|
||||||
|
"friday": 4, "fri": 4, "f": 4,
|
||||||
|
"saturday": 5, "sat": 5, "s": 5,
|
||||||
|
"sunday": 6, "sun": 6, "su": 6, "u": 6
|
||||||
|
}
|
||||||
|
pretty_days = {
|
||||||
|
0: "Monday",
|
||||||
|
1: "Tuesday",
|
||||||
|
2: "Wednesday",
|
||||||
|
3: "Thursday",
|
||||||
|
4: "Friday",
|
||||||
|
5: "Saturday",
|
||||||
|
6: "Sunday"
|
||||||
|
}
|
||||||
|
pretty_months = {
|
||||||
|
1: "January",
|
||||||
|
2: "February",
|
||||||
|
3: "March",
|
||||||
|
4: "April",
|
||||||
|
5: "May",
|
||||||
|
6: "June",
|
||||||
|
7: "July",
|
||||||
|
8: "August",
|
||||||
|
9: "September",
|
||||||
|
10: "October",
|
||||||
|
11: "November",
|
||||||
|
12: "December"
|
||||||
|
}
|
||||||
|
pretty_seasons = {
|
||||||
|
"winter": "Winter",
|
||||||
|
"spring": "Spring",
|
||||||
|
"summer": "Summer",
|
||||||
|
"fall": "Fall"
|
||||||
|
}
|
||||||
|
pretty_names = {
|
||||||
|
"anidb_id": "AniDB ID",
|
||||||
|
"anidb_relation": "AniDB Relation",
|
||||||
|
"anidb_popular": "AniDB Popular",
|
||||||
|
"imdb_list": "IMDb List",
|
||||||
|
"imdb_id": "IMDb ID",
|
||||||
|
"mal_id": "MyAnimeList ID",
|
||||||
|
"mal_all": "MyAnimeList All",
|
||||||
|
"mal_airing": "MyAnimeList Airing",
|
||||||
|
"mal_upcoming": "MyAnimeList Upcoming",
|
||||||
|
"mal_tv": "MyAnimeList TV",
|
||||||
|
"mal_ova": "MyAnimeList OVA",
|
||||||
|
"mal_movie": "MyAnimeList Movie",
|
||||||
|
"mal_special": "MyAnimeList Special",
|
||||||
|
"mal_popular": "MyAnimeList Popular",
|
||||||
|
"mal_favorite": "MyAnimeList Favorite",
|
||||||
|
"mal_season": "MyAnimeList Season",
|
||||||
|
"mal_suggested": "MyAnimeList Suggested",
|
||||||
|
"mal_userlist": "MyAnimeList Userlist",
|
||||||
|
"plex_all": "Plex All",
|
||||||
|
"plex_collection": "Plex Collection",
|
||||||
|
"plex_search": "Plex Search",
|
||||||
|
"tautulli_popular": "Tautulli Popular",
|
||||||
|
"tautulli_watched": "Tautulli Watched",
|
||||||
|
"tmdb_collection": "TMDb Collection",
|
||||||
|
"tmdb_collection_details": "TMDb Collection",
|
||||||
|
"tmdb_company": "TMDb Company",
|
||||||
|
"tmdb_discover": "TMDb Discover",
|
||||||
|
"tmdb_keyword": "TMDb Keyword",
|
||||||
|
"tmdb_list": "TMDb List",
|
||||||
|
"tmdb_list_details": "TMDb List",
|
||||||
|
"tmdb_movie": "TMDb Movie",
|
||||||
|
"tmdb_movie_details": "TMDb Movie",
|
||||||
|
"tmdb_network": "TMDb Network",
|
||||||
|
"tmdb_now_playing": "TMDb Now Playing",
|
||||||
|
"tmdb_popular": "TMDb Popular",
|
||||||
|
"tmdb_show": "TMDb Show",
|
||||||
|
"tmdb_show_details": "TMDb Show",
|
||||||
|
"tmdb_top_rated": "TMDb Top Rated",
|
||||||
|
"tmdb_trending_daily": "TMDb Trending Daily",
|
||||||
|
"tmdb_trending_weekly": "TMDb Trending Weekly",
|
||||||
|
"trakt_list": "Trakt List",
|
||||||
|
"trakt_trending": "Trakt Trending",
|
||||||
|
"trakt_watchlist": "Trakt Watchlist",
|
||||||
|
"tvdb_list": "TVDb List",
|
||||||
|
"tvdb_movie": "TVDb Movie",
|
||||||
|
"tvdb_show": "TVDb Show"
|
||||||
|
}
|
||||||
|
mal_ranked_name = {
|
||||||
|
"mal_all": "all",
|
||||||
|
"mal_airing": "airing",
|
||||||
|
"mal_upcoming": "upcoming",
|
||||||
|
"mal_tv": "tv",
|
||||||
|
"mal_ova": "ova",
|
||||||
|
"mal_movie": "movie",
|
||||||
|
"mal_special": "special",
|
||||||
|
"mal_popular": "bypopularity",
|
||||||
|
"mal_favorite": "favorite"
|
||||||
|
}
|
||||||
|
mal_season_sort = {
|
||||||
|
"anime_score": "anime_score",
|
||||||
|
"anime_num_list_users": "anime_num_list_users",
|
||||||
|
"score": "anime_score",
|
||||||
|
"members": "anime_num_list_users"
|
||||||
|
}
|
||||||
|
mal_pretty = {
|
||||||
|
"anime_score": "Score",
|
||||||
|
"anime_num_list_users": "Members",
|
||||||
|
"list_score": "Score",
|
||||||
|
"list_updated_at": "Last Updated",
|
||||||
|
"anime_title": "Title",
|
||||||
|
"anime_start_date": "Start Date",
|
||||||
|
"all": "All Anime",
|
||||||
|
"watching": "Currently Watching",
|
||||||
|
"completed": "Completed",
|
||||||
|
"on_hold": "On Hold",
|
||||||
|
"dropped": "Dropped",
|
||||||
|
"plan_to_watch": "Plan to Watch"
|
||||||
|
}
|
||||||
|
mal_userlist_sort = {
|
||||||
|
"score": "list_score",
|
||||||
|
"list_score": "list_score",
|
||||||
|
"last_updated": "list_updated_at",
|
||||||
|
"list_updated": "list_updated_at",
|
||||||
|
"list_updated_at": "list_updated_at",
|
||||||
|
"title": "anime_title",
|
||||||
|
"anime_title": "anime_title",
|
||||||
|
"start_date": "anime_start_date",
|
||||||
|
"anime_start_date": "anime_start_date"
|
||||||
|
}
|
||||||
|
mal_userlist_status = [
|
||||||
|
"all",
|
||||||
|
"watching",
|
||||||
|
"completed",
|
||||||
|
"on_hold",
|
||||||
|
"dropped",
|
||||||
|
"plan_to_watch"
|
||||||
|
]
|
||||||
|
pretty_ids = {
|
||||||
|
"anidbid": "AniDB",
|
||||||
|
"imdbid": "IMDb",
|
||||||
|
"mal_id": "MyAnimeList",
|
||||||
|
"themoviedb_id": "TMDb",
|
||||||
|
"thetvdb_id": "TVDb",
|
||||||
|
"tvdbid": "TVDb"
|
||||||
|
}
|
||||||
|
all_lists = [
|
||||||
|
"anidb_id",
|
||||||
|
"anidb_relation",
|
||||||
|
"anidb_popular",
|
||||||
|
"imdb_list",
|
||||||
|
"imdb_id",
|
||||||
|
"mal_id",
|
||||||
|
"mal_all",
|
||||||
|
"mal_airing",
|
||||||
|
"mal_upcoming",
|
||||||
|
"mal_tv",
|
||||||
|
"mal_ova",
|
||||||
|
"mal_movie",
|
||||||
|
"mal_special",
|
||||||
|
"mal_popular",
|
||||||
|
"mal_favorite",
|
||||||
|
"mal_season",
|
||||||
|
"mal_suggested",
|
||||||
|
"mal_userlist",
|
||||||
|
"plex_collection",
|
||||||
|
"plex_search",
|
||||||
|
"tautulli_popular",
|
||||||
|
"tautulli_watched",
|
||||||
|
"tmdb_collection",
|
||||||
|
"tmdb_collection_details",
|
||||||
|
"tmdb_company",
|
||||||
|
"tmdb_discover",
|
||||||
|
"tmdb_keyword",
|
||||||
|
"tmdb_list",
|
||||||
|
"tmdb_list_details",
|
||||||
|
"tmdb_movie",
|
||||||
|
"tmdb_movie_details",
|
||||||
|
"tmdb_network",
|
||||||
|
"tmdb_now_playing",
|
||||||
|
"tmdb_popular",
|
||||||
|
"tmdb_show",
|
||||||
|
"tmdb_show_details",
|
||||||
|
"tmdb_top_rated",
|
||||||
|
"tmdb_trending_daily",
|
||||||
|
"tmdb_trending_weekly",
|
||||||
|
"trakt_list",
|
||||||
|
"trakt_trending",
|
||||||
|
"trakt_watchlist",
|
||||||
|
"tvdb_list",
|
||||||
|
"tvdb_movie",
|
||||||
|
"tvdb_show"
|
||||||
|
]
|
||||||
|
collectionless_lists = [
|
||||||
|
"sort_title", "content_rating",
|
||||||
|
"summary", "tmdb_summary", "tmdb_description", "tmdb_biography",
|
||||||
|
"collection_order", "plex_collectionless",
|
||||||
|
"url_poster", "tmdb_poster", "tmdb_profile", "file_poster",
|
||||||
|
"url_background", "file_background",
|
||||||
|
"name_mapping"
|
||||||
|
]
|
||||||
|
dictionary_lists = [
|
||||||
|
"filters",
|
||||||
|
"mal_season",
|
||||||
|
"mal_userlist",
|
||||||
|
"plex_collectionless",
|
||||||
|
"plex_search",
|
||||||
|
"tautulli_popular",
|
||||||
|
"tautulli_watched",
|
||||||
|
"tmdb_discover"
|
||||||
|
]
|
||||||
|
plex_searches = [
|
||||||
|
"actor", "actor_details_tmdb", #"actor.not", # Waiting on PlexAPI to fix issue
|
||||||
|
"country", #"country.not",
|
||||||
|
"decade", #"decade.not",
|
||||||
|
"director", "director_details_tmdb", #"director.not",
|
||||||
|
"genre", #"genre.not",
|
||||||
|
"studio", #"studio.not",
|
||||||
|
"writer", "writer_details_tmdb", #"writer.not"
|
||||||
|
"year" #"year.not",
|
||||||
|
]
|
||||||
|
show_only_lists = [
|
||||||
|
"tmdb_network",
|
||||||
|
"tmdb_show",
|
||||||
|
"tmdb_show_details",
|
||||||
|
"tvdb_show"
|
||||||
|
]
|
||||||
|
movie_only_lists = [
|
||||||
|
"actor_details_tmdb",
|
||||||
|
"director_details_tmdb",
|
||||||
|
"tmdb_now_playing",
|
||||||
|
"writer_details_tmdb"
|
||||||
|
]
|
||||||
|
movie_only_searches = [
|
||||||
|
"actor", "actor_details_tmdb", #"actor.not", # Waiting on PlexAPI to fix issue
|
||||||
|
"country", #"country.not",
|
||||||
|
"decade", #"decade.not",
|
||||||
|
"director", "director_details_tmdb", #"director.not",
|
||||||
|
"writer", "writer_details_tmdb" #"writer.not"
|
||||||
|
]
|
||||||
|
count_lists = [
|
||||||
|
"anidb_popular",
|
||||||
|
"mal_all",
|
||||||
|
"mal_airing",
|
||||||
|
"mal_upcoming",
|
||||||
|
"mal_tv",
|
||||||
|
"mal_ova",
|
||||||
|
"mal_movie",
|
||||||
|
"mal_special",
|
||||||
|
"mal_popular",
|
||||||
|
"mal_favorite",
|
||||||
|
"mal_suggested",
|
||||||
|
"tmdb_popular",
|
||||||
|
"tmdb_top_rated",
|
||||||
|
"tmdb_now_playing",
|
||||||
|
"tmdb_trending_daily",
|
||||||
|
"tmdb_trending_weekly",
|
||||||
|
"trakt_trending"
|
||||||
|
]
|
||||||
|
tmdb_lists = [
|
||||||
|
"tmdb_collection",
|
||||||
|
"tmdb_collection_details",
|
||||||
|
"tmdb_company",
|
||||||
|
"tmdb_discover",
|
||||||
|
"tmdb_keyword",
|
||||||
|
"tmdb_list",
|
||||||
|
"tmdb_list_details",
|
||||||
|
"tmdb_movie",
|
||||||
|
"tmdb_movie_details",
|
||||||
|
"tmdb_network",
|
||||||
|
"tmdb_now_playing",
|
||||||
|
"tmdb_popular",
|
||||||
|
"tmdb_show",
|
||||||
|
"tmdb_show_details",
|
||||||
|
"tmdb_top_rated",
|
||||||
|
"tmdb_trending_daily",
|
||||||
|
"tmdb_trending_weekly"
|
||||||
|
]
|
||||||
|
tmdb_type = {
|
||||||
|
"tmdb_collection": "Collection",
|
||||||
|
"tmdb_collection_details": "Collection",
|
||||||
|
"tmdb_company": "Company",
|
||||||
|
"tmdb_keyword": "Keyword",
|
||||||
|
"tmdb_list": "List",
|
||||||
|
"tmdb_list_details": "List",
|
||||||
|
"tmdb_movie": "Movie",
|
||||||
|
"tmdb_movie_details": "Movie",
|
||||||
|
"tmdb_network": "Network",
|
||||||
|
"tmdb_show": "Show",
|
||||||
|
"tmdb_show_details": "Show"
|
||||||
|
}
|
||||||
|
all_filters = [
|
||||||
|
"actor", "actor.not",
|
||||||
|
"audio_language", "audio_language.not",
|
||||||
|
"collection", "collection.not",
|
||||||
|
"content_rating", "content_rating.not",
|
||||||
|
"country", "country.not",
|
||||||
|
"director", "director.not",
|
||||||
|
"genre", "genre.not",
|
||||||
|
"max_age",
|
||||||
|
"originally_available.gte", "originally_available.lte",
|
||||||
|
"rating.gte", "rating.lte",
|
||||||
|
"studio", "studio.not",
|
||||||
|
"subtitle_language", "subtitle_language.not",
|
||||||
|
"video_resolution", "video_resolution.not",
|
||||||
|
"writer", "writer.not",
|
||||||
|
"year", "year.gte", "year.lte", "year.not"
|
||||||
|
]
|
||||||
|
movie_only_filters = [
|
||||||
|
"audio_language", "audio_language.not",
|
||||||
|
"country", "country.not",
|
||||||
|
"director", "director.not",
|
||||||
|
"subtitle_language", "subtitle_language.not",
|
||||||
|
"video_resolution", "video_resolution.not",
|
||||||
|
"writer", "writer.not"
|
||||||
|
]
|
||||||
|
all_details = [
|
||||||
|
"sort_title", "content_rating",
|
||||||
|
"summary", "tmdb_summary", "tmdb_description", "tmdb_biography",
|
||||||
|
"collection_mode", "collection_order",
|
||||||
|
"url_poster", "tmdb_poster", "tmdb_profile", "file_poster",
|
||||||
|
"url_background", "file_background",
|
||||||
|
"name_mapping", "add_to_arr"
|
||||||
|
]
|
||||||
|
discover_movie = [
|
||||||
|
"language", "with_original_language", "region", "sort_by",
|
||||||
|
"certification_country", "certification", "certification.lte", "certification.gte",
|
||||||
|
"include_adult",
|
||||||
|
"primary_release_year", "primary_release_date.gte", "primary_release_date.lte",
|
||||||
|
"release_date.gte", "release_date.lte", "year",
|
||||||
|
"vote_count.gte", "vote_count.lte",
|
||||||
|
"vote_average.gte", "vote_average.lte",
|
||||||
|
"with_cast", "with_crew", "with_people",
|
||||||
|
"with_companies",
|
||||||
|
"with_genres", "without_genres",
|
||||||
|
"with_keywords", "without_keywords",
|
||||||
|
"with_runtime.gte", "with_runtime.lte"
|
||||||
|
]
|
||||||
|
discover_tv = [
|
||||||
|
"language", "with_original_language", "timezone", "sort_by",
|
||||||
|
"air_date.gte", "air_date.lte",
|
||||||
|
"first_air_date.gte", "first_air_date.lte", "first_air_date_year",
|
||||||
|
"vote_count.gte", "vote_count.lte",
|
||||||
|
"vote_average.gte", "vote_average.lte",
|
||||||
|
"with_genres", "without_genres",
|
||||||
|
"with_keywords", "without_keywords",
|
||||||
|
"with_networks", "with_companies",
|
||||||
|
"with_runtime.gte", "with_runtime.lte",
|
||||||
|
"include_null_first_air_dates",
|
||||||
|
"screened_theatrically"
|
||||||
|
]
|
||||||
|
discover_movie_sort = [
|
||||||
|
"popularity.asc", "popularity.desc",
|
||||||
|
"release_date.asc", "release_date.desc",
|
||||||
|
"revenue.asc", "revenue.desc",
|
||||||
|
"primary_release_date.asc", "primary_release_date.desc",
|
||||||
|
"original_title.asc", "original_title.desc",
|
||||||
|
"vote_average.asc", "vote_average.desc",
|
||||||
|
"vote_count.asc", "vote_count.desc"
|
||||||
|
]
|
||||||
|
discover_tv_sort = [
|
||||||
|
"vote_average.desc", "vote_average.asc",
|
||||||
|
"first_air_date.desc", "first_air_date.asc",
|
||||||
|
"popularity.desc", "popularity.asc"
|
||||||
|
]
|
||||||
|
|
||||||
|
def adjust_space(old_length, display_title):
|
||||||
|
display_title = str(display_title)
|
||||||
|
space_length = old_length - len(display_title)
|
||||||
|
if space_length > 0:
|
||||||
|
display_title += " " * space_length
|
||||||
|
return display_title
|
||||||
|
|
||||||
|
def make_ordinal(n):
|
||||||
|
n = int(n)
|
||||||
|
suffix = ["th", "st", "nd", "rd", "th"][min(n % 10, 4)]
|
||||||
|
if 11 <= (n % 100) <= 13:
|
||||||
|
suffix = "th"
|
||||||
|
return str(n) + suffix
|
||||||
|
|
||||||
|
def choose_from_list(datalist, description, data=None, list_type="title", exact=False):
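# Ask the user to pick one entry when a lookup returns several matches; on input timeout it falls back to the default choice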
|
||||||
|
if len(datalist) > 0:
|
||||||
|
if len(datalist) == 1 and (description != "collection" or datalist[0].title == data):
|
||||||
|
return datalist[0]
|
||||||
|
message = "Multiple {}s Found\n0) {}".format(description, "Create New Collection: {}".format(data) if description == "collection" else "Do Nothing")
|
||||||
|
for i, d in enumerate(datalist, 1):
|
||||||
|
if list_type == "title":
|
||||||
|
if d.title == data:
|
||||||
|
return d
|
||||||
|
message += "\n{}) {}".format(i, d.title)
|
||||||
|
else:
|
||||||
|
message += "\n{}) [{}] {}".format(i, d[0], d[1])
|
||||||
|
if exact:
|
||||||
|
return None
|
||||||
|
print_multiline(message, info=True)
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
selection = int(logger_input("Choose {} number".format(description))) - 1
|
||||||
|
if selection >= 0: return datalist[selection]
|
||||||
|
elif selection == -1: return None
|
||||||
|
else: logger.info("Invalid {} number".format(description))
|
||||||
|
except (IndexError, ValueError): logger.info("Invalid {} number".format(description))
|
||||||
|
except TimeoutExpired:
|
||||||
|
if list_type == "title":
|
||||||
|
logger.warning("Input Timeout: using {}".format(data))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
logger.warning("Input Timeout: using {}".format(datalist[0][1]))
|
||||||
|
return datalist[0][1]
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_list(data):
|
||||||
|
if isinstance(data, list): return data
|
||||||
|
elif isinstance(data, dict): return [data]
|
||||||
|
else: return str(data).split(", ")
|
||||||
|
|
||||||
|
def get_int_list(data, id_type):
|
||||||
|
values = get_list(data)
|
||||||
|
int_values = []
|
||||||
|
for value in values:
|
||||||
|
try: int_values.append(regex_first_int(value, id_type))
|
||||||
|
except Failed as e: logger.error(e)
|
||||||
|
return int_values
|
||||||
|
|
||||||
|
def get_year_list(data, method):
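# Expand year values and ranges such as "1990-1999" or "2000-NOW" into a flat list of years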
|
||||||
|
values = get_list(data)
|
||||||
|
final_years = []
|
||||||
|
current_year = datetime.datetime.now().year
|
||||||
|
for value in values:
|
||||||
|
try:
|
||||||
|
if "-" in value:
|
||||||
|
year_range = re.search("(\\d{4})-(\\d{4}|NOW)", str(value))
|
||||||
|
start = year_range.group(1)
|
||||||
|
end = year_range.group(2)
|
||||||
|
if end == "NOW":
|
||||||
|
end = current_year
|
||||||
|
if int(start) < 1800 or int(start) > current_year: logger.error("Collection Error: Skipping {} starting year {} must be between 1800 and {}".format(method, start, current_year))
|
||||||
|
elif int(end) < 1800 or int(end) > current_year: logger.error("Collection Error: Skipping {} ending year {} must be between 1800 and {}".format(method, end, current_year))
|
||||||
|
elif int(start) > int(end): logger.error("Collection Error: Skipping {} starting year {} cannot be greater than ending year {}".format(method, start, end))
|
||||||
|
else:
|
||||||
|
for i in range(int(start), int(end) + 1):
|
||||||
|
final_years.append(i)
|
||||||
|
else:
|
||||||
|
year = re.search("(\\d+)", str(value)).group(1)
|
||||||
|
if int(year) < 1800 or int(year) > current_year:
|
||||||
|
logger.error("Collection Error: Skipping {} year {} must be between 1800 and {}".format(method, year, current_year))
|
||||||
|
else:
|
||||||
|
if len(str(year)) != len(str(value)):
|
||||||
|
logger.warning("Collection Warning: {} can be replaced with {}".format(value, year))
|
||||||
|
final_years.append(year)
|
||||||
|
except AttributeError:
|
||||||
|
logger.error("Collection Error: Skipping {} failed to parse year from {}".format(method, value))
|
||||||
|
return final_years
|
||||||
|
|
||||||
|
def logger_input(prompt, timeout=60):
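# Console input with a timeout: msvcrt polling on Windows, SIGALRM elsewhere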
|
||||||
|
if windows: return windows_input(prompt, timeout)
|
||||||
|
elif hasattr(signal, "SIGALRM"): return unix_input(prompt, timeout)
|
||||||
|
else: raise SystemError("Input Timeout not supported on this system")
|
||||||
|
|
||||||
|
def alarm_handler(signum, frame):
|
||||||
|
raise TimeoutExpired
|
||||||
|
|
||||||
|
def unix_input(prompt, timeout=60):
|
||||||
|
prompt = "| {}: ".format(prompt)
|
||||||
|
signal.signal(signal.SIGALRM, alarm_handler)
|
||||||
|
signal.alarm(timeout)
|
||||||
|
try: return input(prompt)
|
||||||
|
finally: signal.alarm(0)
|
||||||
|
|
||||||
|
def old_windows_input(prompt, timeout=60, timer=time.monotonic):
|
||||||
|
prompt = "| {}: ".format(prompt)
|
||||||
|
sys.stdout.write(prompt)
|
||||||
|
sys.stdout.flush()
|
||||||
|
endtime = timer() + timeout
|
||||||
|
result = []
|
||||||
|
while timer() < endtime:
|
||||||
|
if msvcrt.kbhit():
|
||||||
|
result.append(msvcrt.getwche())
|
||||||
|
if result[-1] == "\n":
|
||||||
|
out = "".join(result[:-1])
|
||||||
|
logger.debug("{}{}".format(prompt[2:], out))
|
||||||
|
return out
|
||||||
|
time.sleep(0.04)
|
||||||
|
raise TimeoutExpired
|
||||||
|
|
||||||
|
def windows_input(prompt, timeout=5):
|
||||||
|
sys.stdout.write("| {}: ".format(prompt))
|
||||||
|
sys.stdout.flush()
|
||||||
|
result = []
|
||||||
|
start_time = time.time()
|
||||||
|
while True:
|
||||||
|
if msvcrt.kbhit():
|
||||||
|
chr = msvcrt.getwche()
|
||||||
|
if ord(chr) == 13: # enter_key
|
||||||
|
out = "".join(result)
|
||||||
|
print("")
|
||||||
|
logger.debug("{}: {}".format(prompt, out))
|
||||||
|
return out
|
||||||
|
elif ord(chr) >= 32: #space_char
|
||||||
|
result.append(chr)
|
||||||
|
if (time.time() - start_time) > timeout:
|
||||||
|
print("")
|
||||||
|
raise TimeoutExpired
|
||||||
|
|
||||||
|
|
||||||
|
def print_multiline(lines, info=False, warning=False, error=False, critical=False):
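# Log each line of a multi-line string at the requested level, indenting continuation lines in the file log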
|
||||||
|
for i, line in enumerate(lines.split("\n")):
|
||||||
|
if critical: logger.critical(line)
|
||||||
|
elif error: logger.error(line)
|
||||||
|
elif warning: logger.warning(line)
|
||||||
|
elif info: logger.info(line)
|
||||||
|
else: logger.debug(line)
|
||||||
|
if i == 0:
|
||||||
|
logger.handlers[1].setFormatter(logging.Formatter(" " * 65 + "| %(message)s"))
|
||||||
|
logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)s"))
|
||||||
|
|
||||||
|
def print_stacktrace():
|
||||||
|
print_multiline(traceback.format_exc())
|
||||||
|
|
||||||
|
def my_except_hook(exctype, value, tb):
|
||||||
|
for line in traceback.format_exception(exctype, value, tb):
|
||||||
|
print_multiline(line, critical=True)
|
||||||
|
|
||||||
|
def get_id_from_imdb_url(imdb_url):
|
||||||
|
match = re.search("(tt\\d+)", str(imdb_url))
|
||||||
|
if match: return match.group(1)
|
||||||
|
else: raise Failed("Regex Error: Failed to parse IMDb ID from IMDb URL: {}".format(imdb_url))
|
||||||
|
|
||||||
|
def regex_first_int(data, id_type, default=None):
|
||||||
|
match = re.search("(\\d+)", str(data))
|
||||||
|
if match:
|
||||||
|
return int(match.group(1))
|
||||||
|
elif default:
|
||||||
|
logger.warning("Regex Warning: Failed to parse {} from {} using {} as default".format(id_type, data, default))
|
||||||
|
return int(default)
|
||||||
|
else:
|
||||||
|
raise Failed("Regex Error: Failed to parse {} from {}".format(id_type, data))
|
||||||
|
|
||||||
|
def remove_not(method):
|
||||||
|
return method[:-4] if method.endswith(".not") else method
|
||||||
|
|
||||||
|
def get_centered_text(text):
|
||||||
|
if len(text) > screen_width - 2:
|
||||||
|
raise Failed("text must be shorter then screen_width")
|
||||||
|
space = screen_width - len(text) - 2
|
||||||
|
if space % 2 == 1:
|
||||||
|
text += " "
|
||||||
|
space -= 1
|
||||||
|
side = int(space / 2)
|
||||||
|
return "{}{}{}".format(" " * side, text, " " * side)
|
||||||
|
|
||||||
|
def seperator(text=None):
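# Write a full-width separator row, optionally with centered text, to both the console and the log file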
|
||||||
|
logger.handlers[0].setFormatter(logging.Formatter("%(message)-{}s".format(screen_width - 2)))
|
||||||
|
logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s %(message)-{}s".format(screen_width - 2)))
|
||||||
|
logger.info("|{}|".format(seperating_character * screen_width))
|
||||||
|
if text:
|
||||||
|
logger.info("| {} |".format(get_centered_text(text)))
|
||||||
|
logger.info("|{}|".format(seperating_character * screen_width))
|
||||||
|
logger.handlers[0].setFormatter(logging.Formatter("| %(message)-{}s |".format(screen_width - 2)))
|
||||||
|
logger.handlers[1].setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-{}s |".format(screen_width - 2)))
|
||||||
|
|
||||||
|
def print_return(length, text):
|
||||||
|
print(adjust_space(length, "| {}".format(text)), end="\r")
|
||||||
|
return len(text) + 2
|
||||||
|
|
||||||
|
def print_end(length, text=None):
|
||||||
|
if text: logger.info(adjust_space(length, text))
|
||||||
|
else: print(adjust_space(length, " "), end="\r")
|
86
plex_meta_manager.py
Normal file
|
@ -0,0 +1,86 @@
|
||||||
|
import argparse, logging, logging.handlers, os, re, schedule, sys, time, traceback, datetime
|
||||||
|
from modules import tests, util
|
||||||
|
from modules.config import Config
|
||||||
|
|
||||||
|
parser = argparse.ArgumentParser()
|
||||||
|
parser.add_argument("--test", dest="test", help=argparse.SUPPRESS, action="store_true", default=False)
|
||||||
|
parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
|
||||||
|
parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
|
||||||
|
parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
|
||||||
|
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
|
||||||
|
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
|
||||||
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", args.time):
|
||||||
|
raise util.Failed("Argument Error: time argument invalid: {} must be in the HH:MM format".format(args.time))
|
||||||
|
|
||||||
|
util.seperating_character = args.divider[0]
|
||||||
|
if 90 <= args.width <= 300:
|
||||||
|
util.screen_width = args.width
|
||||||
|
else:
|
||||||
|
raise util.Failed("Argument Error: width argument invalid: {} must be an integer between 90 and 300".format(args.width))
|
||||||
|
|
||||||
|
default_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config")
|
||||||
|
if args.config and os.path.exists(args.config): default_dir = os.path.join(os.path.dirname(os.path.abspath(args.config)))
|
||||||
|
elif args.config and not os.path.exists(args.config): raise util.Failed("Config Error: config not found at {}".format(os.path.abspath(args.config)))
|
||||||
|
elif not os.path.exists(os.path.join(default_dir, "config.yml")): raise util.Failed("Config Error: config not found at {}".format(os.path.abspath(default_dir)))
|
||||||
|
|
||||||
|
os.makedirs(os.path.join(default_dir, "logs"), exist_ok=True)
|
||||||
|
|
||||||
|
logger = logging.getLogger("Plex Meta Manager")
|
||||||
|
logger.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
def fmt_filter(record):
|
||||||
|
record.levelname = "[{}]".format(record.levelname)
|
||||||
|
record.filename = "[{}:{}]".format(record.filename, record.lineno)
|
||||||
|
return True
|
||||||
|
|
||||||
|
file_handler = logging.handlers.TimedRotatingFileHandler(os.path.join(default_dir, "logs", "meta.log"), when="midnight", backupCount=10, encoding="utf-8")
|
||||||
|
file_handler.addFilter(fmt_filter)
|
||||||
|
file_handler.setFormatter(logging.Formatter("[%(asctime)s] %(filename)-27s %(levelname)-10s | %(message)-100s |"))
|
||||||
|
|
||||||
|
cmd_handler = logging.StreamHandler()
|
||||||
|
cmd_handler.setFormatter(logging.Formatter("| %(message)-100s |"))
|
||||||
|
cmd_handler.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
logger.addHandler(cmd_handler)
|
||||||
|
logger.addHandler(file_handler)
|
||||||
|
|
||||||
|
sys.excepthook = util.my_except_hook
|
||||||
|
|
||||||
|
util.seperator()
|
||||||
|
logger.info(util.get_centered_text(" "))
|
||||||
|
logger.info(util.get_centered_text(" ____ _ __ __ _ __ __ "))
|
||||||
|
logger.info(util.get_centered_text("| _ \| | _____ __ | \/ | ___| |_ __ _ | \/ | __ _ _ __ __ _ __ _ ___ _ __ "))
|
||||||
|
logger.info(util.get_centered_text("| |_) | |/ _ \ \/ / | |\/| |/ _ \ __/ _` | | |\/| |/ _` | '_ \ / _` |/ _` |/ _ \ '__|"))
|
||||||
|
logger.info(util.get_centered_text("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
|
||||||
|
logger.info(util.get_centered_text("|_| |_|\___/_/\_\ |_| |_|\___|\__\__,_| |_| |_|\__,_|_| |_|\__,_|\__, |\___|_| "))
|
||||||
|
logger.info(util.get_centered_text(" |___/ "))
|
||||||
|
logger.info(util.get_centered_text(" Version: 1.0.0 "))
|
||||||
|
util.seperator()
|
||||||
|
|
||||||
|
if args.test:
|
||||||
|
tests.run_tests(default_dir)
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
def start(config_path):
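# Build the Config and run a full library update, logging any unhandled exception instead of crashing the scheduler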
|
||||||
|
try:
|
||||||
|
util.seperator("Starting Daily Run")
|
||||||
|
config = Config(default_dir, config_path)
|
||||||
|
config.update_libraries()
|
||||||
|
except Exception as e:
|
||||||
|
util.print_stacktrace()
|
||||||
|
logger.critical(e)
|
||||||
|
logger.info("")
|
||||||
|
util.seperator("Finished Daily Run")
|
||||||
|
|
||||||
|
try:
|
||||||
|
if args.run:
|
||||||
|
start(args.config)
|
||||||
|
else:
|
||||||
|
schedule.every().day.at(args.time).do(start, args.config)
|
||||||
|
while True:
|
||||||
|
schedule.run_pending()
|
||||||
|
time.sleep(1)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
util.seperator("Exiting Plex Meta Manager")
|
13
requirements.txt
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Remove
|
||||||
|
# Less common, pinned
|
||||||
|
PlexAPI==4.2.0
|
||||||
|
tmdbv3api==1.7.3
|
||||||
|
trakt.py==4.2.0
|
||||||
|
# More common, flexible
|
||||||
|
bs4
|
||||||
|
lxml
|
||||||
|
requests>=2.4.2
|
||||||
|
ruamel.yaml
|
||||||
|
schedule
|
||||||
|
retrying
|
||||||
|
mutagen
|