Mirror of https://github.com/meisnate12/Plex-Meta-Manager, synced 2025-02-16 22:08:25 +00:00
Commit f727068317
27 changed files with 2685 additions and 2739 deletions
.dockerignore

@@ -7,4 +7,13 @@
 README.md
 LICENSE
 .gitignore
+.dockerignore
+.git
+.github
+*.psd
+config/**/*
+config
+Dockerfile
+venv
+.idea
 test.py
Dockerfile (31 changed lines)

@@ -1,20 +1,15 @@
-FROM python:3-slim
-VOLUME /config
+FROM python:3.9-slim
+RUN echo "**** install system packages ****" \
+ && apt-get update \
+ && apt-get upgrade -y --no-install-recommends \
+ && apt-get install -y tzdata --no-install-recommends \
+ && apt-get install -y gcc g++ libxml2-dev libxslt-dev libz-dev
+COPY requirements.txt /
+RUN echo "**** install python packages ****" \
+ && pip3 install --no-cache-dir --upgrade --requirement /requirements.txt \
+ && apt-get autoremove -y \
+ && apt-get clean \
+ && rm -rf /requirements.txt /tmp/* /var/tmp/* /var/lib/apt/lists/*
 COPY . /
-RUN \
-    echo "**** install system packages ****" && \
-    apt-get update && \
-    apt-get upgrade -y --no-install-recommends && \
-    apt-get install -y tzdata --no-install-recommends && \
-    apt-get install -y gcc g++ libxml2-dev libxslt-dev libz-dev && \
-    echo "**** install python packages ****" && \
-    pip3 install --no-cache-dir --upgrade --requirement /requirements.txt && \
-    echo "**** cleanup ****" && \
-    apt-get autoremove -y && \
-    apt-get clean && \
-    rm -rf \
-        /requirements.txt \
-        /tmp/* \
-        /var/tmp/* \
-        /var/lib/apt/lists/*
+VOLUME /config
 ENTRYPOINT ["python3", "plex_meta_manager.py"]
@@ -89,3 +89,6 @@ mal:
     token_type:
     expires_in:
     refresh_token:
+anidb:  # Optional
+  username: ######
+  password: ######
modules/anidb.py

@@ -1,42 +1,54 @@
-import logging, requests
-from lxml import html
+import logging, time
 from modules import util
 from modules.util import Failed
-from retrying import retry

 logger = logging.getLogger("Plex Meta Manager")

-builders = ["anidb_id", "anidb_relation", "anidb_popular"]
+builders = ["anidb_id", "anidb_relation", "anidb_popular", "anidb_tag"]
+base_url = "https://anidb.net"
+urls = {
+    "anime": f"{base_url}/anime",
+    "popular": f"{base_url}/latest/anime/popular/?h=1",
+    "relation": "/relation/graph",
+    "tag": f"{base_url}/tag",
+    "login": f"{base_url}/perl-bin/animedb.pl"
+}

 class AniDB:
-    def __init__(self, config):
+    def __init__(self, config, params):
         self.config = config
-        self.urls = {
-            "anime": "https://anidb.net/anime",
-            "popular": "https://anidb.net/latest/anime/popular/?h=1",
-            "relation": "/relation/graph"
-        }
+        self.username = params["username"] if params else None
+        self.password = params["password"] if params else None
+        if params and not self._login(self.username, self.password).xpath("//li[@class='sub-menu my']/@title"):
+            raise Failed("AniDB Error: Login failed")

-    @retry(stop_max_attempt_number=6, wait_fixed=10000)
-    def _request(self, url, language):
-        return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content)
+    def _request(self, url, language=None, post=None):
+        if post:
+            return self.config.post_html(url, post, headers=util.header(language))
+        else:
+            return self.config.get_html(url, headers=util.header(language))
+
+    def _login(self, username, password):
+        data = {"show": "main", "xuser": username, "xpass": password, "xdoautologin": "on"}
+        return self._request(urls["login"], post=data)

     def _popular(self, language):
-        response = self._request(self.urls["popular"], language)
+        response = self._request(urls["popular"], language=language)
         return util.get_int_list(response.xpath("//td[@class='name anime']/a/@href"), "AniDB ID")

     def _relations(self, anidb_id, language):
-        response = self._request(f"{self.urls['anime']}/{anidb_id}{self.urls['relation']}", language)
+        response = self._request(f"{urls['anime']}/{anidb_id}{urls['relation']}", language=language)
         return util.get_int_list(response.xpath("//area/@href"), "AniDB ID")

     def _validate(self, anidb_id, language):
-        response = self._request(f"{self.urls['anime']}/{anidb_id}", language)
+        response = self._request(f"{urls['anime']}/{anidb_id}", language=language)
         ids = response.xpath(f"//*[text()='a{anidb_id}']/text()")
         if len(ids) > 0:
             return util.regex_first_int(ids[0], "AniDB ID")
         raise Failed(f"AniDB Error: AniDB ID: {anidb_id} not found")

-    def validate_anidb_list(self, anidb_list, language):
+    def validate_anidb_ids(self, anidb_ids, language):
+        anidb_list = util.get_int_list(anidb_ids, "AniDB ID")
         anidb_values = []
         for anidb_id in anidb_list:
             try:
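The new login flow posts the AniDB form fields and then confirms success by scraping the returned page for an element only shown to logged-in users. A minimal standalone sketch of that post-then-verify pattern, using requests and lxml directly instead of PMM's config.post_html helper (the URL, form fields, and xpath are taken from the diff above, but this simplified function itself is illustrative, not PMM's actual API):

    # Sketch of the post-then-verify login pattern used by AniDB._login above.
    import requests
    from lxml import html

    def login(base_url, username, password):
        session = requests.Session()
        data = {"show": "main", "xuser": username, "xpass": password, "xdoautologin": "on"}
        response = session.post(f"{base_url}/perl-bin/animedb.pl", data=data)
        page = html.fromstring(response.content)
        # Success is detected by the presence of the logged-in user menu.
        if not page.xpath("//li[@class='sub-menu my']/@title"):
            raise RuntimeError("Login failed")
        return session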
@@ -47,20 +59,35 @@ class AniDB:
             return anidb_values
         raise Failed(f"AniDB Error: No valid AniDB IDs in {anidb_list}")

-    def get_items(self, method, data, language):
-        pretty = util.pretty_names[method] if method in util.pretty_names else method
+    def _tag(self, tag, limit, language):
+        anidb_ids = []
+        current_url = f"{urls['tag']}/{tag}"
+        while True:
+            response = self._request(current_url, language=language)
+            anidb_ids.extend(util.get_int_list(response.xpath("//td[@class='name main anime']/a/@href"), "AniDB ID"))
+            next_page_list = response.xpath("//li[@class='next']/a/@href")
+            if len(anidb_ids) >= limit or len(next_page_list) == 0:
+                break
+            time.sleep(2)
+            current_url = f"{base_url}{next_page_list[0]}"
+        return anidb_ids[:limit]
+
+    def get_anidb_ids(self, method, data, language):
         anidb_ids = []
         if method == "anidb_popular":
-            logger.info(f"Processing {pretty}: {data} Anime")
+            logger.info(f"Processing AniDB Popular: {data} Anime")
             anidb_ids.extend(self._popular(language)[:data])
+        elif method == "anidb_tag":
+            logger.info(f"Processing AniDB Tag: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag ID: {data['tag']}")
+            anidb_ids = self._tag(data["tag"], data["limit"], language)
+        elif method == "anidb_id":
+            logger.info(f"Processing AniDB ID: {data}")
+            anidb_ids.append(data)
+        elif method == "anidb_relation":
+            logger.info(f"Processing AniDB Relation: {data}")
+            anidb_ids.extend(self._relations(data, language))
         else:
-            logger.info(f"Processing {pretty}: {data}")
-            if method == "anidb_id": anidb_ids.append(data)
-            elif method == "anidb_relation": anidb_ids.extend(self._relations(data, language))
-            else: raise Failed(f"AniDB Error: Method {method} not supported")
-        movie_ids, show_ids = self.config.Convert.anidb_to_ids(anidb_ids)
+            raise Failed(f"AniDB Error: Method {method} not supported")
         logger.debug("")
         logger.debug(f"{len(anidb_ids)} AniDB IDs Found: {anidb_ids}")
-        logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
-        logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
-        return movie_ids, show_ids
+        return anidb_ids
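The new _tag method walks AniDB's paginated tag listing, following the "next" link and sleeping between requests, until it either has enough IDs or runs out of pages. The same loop shape, reduced to a self-contained sketch with a fake page fetcher (all names here are illustrative):

    import time

    def collect_until_limit(fetch_page, first_url, limit, delay=2):
        """Follow 'next' links, accumulating IDs until limit is reached or pages run out."""
        ids, url = [], first_url
        while True:
            page_ids, next_url = fetch_page(url)
            ids.extend(page_ids)
            if (limit > 0 and len(ids) >= limit) or next_url is None:
                break
            time.sleep(delay)  # be polite to the upstream site between pages
            url = next_url
        return ids[:limit] if limit > 0 else ids

    # Example with a canned two-page source:
    pages = {"p1": ([1, 2, 3], "p2"), "p2": ([4, 5], None)}
    print(collect_until_limit(lambda u: pages[u], "p1", limit=4, delay=0))  # [1, 2, 3, 4]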
modules/anilist.py

@@ -1,52 +1,63 @@
-import logging, requests, time
+import logging, time
 from modules import util
 from modules.util import Failed
-from retrying import retry

 logger = logging.getLogger("Plex Meta Manager")

-builders = [
-    "anilist_genre",
-    "anilist_id",
-    "anilist_popular",
-    "anilist_relations",
-    "anilist_season",
-    "anilist_studio",
-    "anilist_tag",
-    "anilist_top_rated"
+builders = ["anilist_id", "anilist_popular", "anilist_relations", "anilist_studio", "anilist_top_rated", "anilist_search"]
+pretty_names = {"score": "Average Score", "popular": "Popularity"}
+attr_translation = {"year": "seasonYear", "adult": "isAdult", "start": "startDate", "end": "endDate", "tag_category": "tagCategory", "score": "averageScore", "min_tag_percent": "minimumTagRank"}
+mod_translation = {"": "in", "not": "not_in", "before": "greater", "after": "lesser", "gt": "greater", "gte": "greater", "lt": "lesser", "lte": "lesser"}
+mod_searches = [
+    "start.before", "start.after", "end.before", "end.after",
+    "format", "format.not", "status", "status.not", "genre", "genre.not", "tag", "tag.not", "tag_category", "tag_category.not",
+    "episodes.gt", "episodes.gte", "episodes.lt", "episodes.lte", "duration.gt", "duration.gte", "duration.lt", "duration.lte",
+    "score.gt", "score.gte", "score.lt", "score.lte", "popularity.gt", "popularity.gte", "popularity.lt", "popularity.lte"
 ]
-pretty_names = {
-    "score": "Average Score",
-    "popular": "Popularity"
+no_mod_searches = ["search", "season", "year", "adult", "min_tag_percent"]
+searches = mod_searches + no_mod_searches
+search_types = {
+    "search": "String", "season": "MediaSeason", "seasonYear": "Int", "isAdult": "Boolean", "minimumTagRank": "Int",
+    "startDate": "FuzzyDateInt", "endDate": "FuzzyDateInt", "format": "[MediaFormat]", "status": "[MediaStatus]",
+    "genre": "[String]", "tag": "[String]", "tagCategory": "[String]",
+    "episodes": "Int", "duration": "Int", "averageScore": "Int", "popularity": "Int"
 }
-tag_query = "query{MediaTagCollection {name}}"
+media_season = {"winter": "WINTER", "spring": "SPRING", "summer": "SUMMER", "fall": "FALL"}
+media_format = {"tv": "TV", "short": "TV_SHORT", "movie": "MOVIE", "special": "SPECIAL", "ova": "OVA", "ona": "ONA", "music": "MUSIC"}
+media_status = {"finished": "FINISHED", "airing": "RELEASING", "not_yet_aired": "NOT_YET_RELEASED", "cancelled": "CANCELLED", "hiatus": "HIATUS"}
+base_url = "https://graphql.anilist.co"
+tag_query = "query{MediaTagCollection {name, category}}"
 genre_query = "query{GenreCollection}"

 class AniList:
     def __init__(self, config):
         self.config = config
-        self.url = "https://graphql.anilist.co"
-        self.tags = {}
-        self.genres = {}
-        self.tags = {t["name"].lower(): t["name"] for t in self._request(tag_query, {})["data"]["MediaTagCollection"]}
-        self.genres = {g.lower(): g for g in self._request(genre_query, {})["data"]["GenreCollection"]}
+        self.options = {
+            "Tag": {}, "Tag Category": {},
+            "Genre": {g.lower().replace(" ", "-"): g for g in self._request(genre_query, {})["data"]["GenreCollection"]},
+            "Season": media_season, "Format": media_format, "Status": media_status
+        }
+        for media_tag in self._request(tag_query, {})["data"]["MediaTagCollection"]:
+            self.options["Tag"][media_tag["name"].lower().replace(" ", "-")] = media_tag["name"]
+            self.options["Tag Category"][media_tag["category"].lower().replace(" ", "-")] = media_tag["category"]

-    @retry(stop_max_attempt_number=2, retry_on_exception=util.retry_if_not_failed)
-    def _request(self, query, variables):
-        response = requests.post(self.url, json={"query": query, "variables": variables})
+    def _request(self, query, variables, level=1):
+        response = self.config.post(base_url, json={"query": query, "variables": variables})
         json_obj = response.json()
         if "errors" in json_obj:
             if json_obj['errors'][0]['message'] == "Too Many Requests.":
-                if "Retry-After" in response.headers:
-                    time.sleep(int(response.headers["Retry-After"]))
-                raise ValueError
+                wait_time = int(response.headers["Retry-After"]) if "Retry-After" in response.headers else 0
+                time.sleep(wait_time if wait_time > 0 else 10)
+                if level < 6:
+                    return self._request(query, variables, level=level + 1)
+                raise Failed(f"AniList Error: Connection Failed")
             else:
                 raise Failed(f"AniList Error: {json_obj['errors'][0]['message']}")
         else:
-            time.sleep(0.4)
+            time.sleep(60 / 90)
             return json_obj

-    def _validate(self, anilist_id):
+    def _validate_id(self, anilist_id):
         query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}"
         media = self._request(query, {"id": anilist_id})["data"]["Media"]
         if media["id"]:
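The reworked _request drops the retrying decorator in favor of explicit rate-limit handling: honor the Retry-After header when AniList sends one, otherwise wait 10 seconds, retry up to six levels deep, and on success sleep 60/90 seconds (about 0.67s) to stay under 90 requests per minute. A minimal sketch of the same policy as a loop (do_post is a hypothetical stand-in for the HTTP helper):

    import time

    def request_with_backoff(do_post, query, variables, max_attempts=6):
        """Retry on rate-limit errors, honoring Retry-After when the server sends it."""
        for attempt in range(max_attempts):
            response = do_post(json={"query": query, "variables": variables})
            json_obj = response.json()
            errors = json_obj.get("errors")
            if not errors:
                time.sleep(60 / 90)  # stay under 90 requests per minute
                return json_obj
            if errors[0]["message"] == "Too Many Requests.":
                wait_time = int(response.headers.get("Retry-After", 0))
                time.sleep(wait_time if wait_time > 0 else 10)
                continue
            raise RuntimeError(f"AniList Error: {errors[0]['message']}")
        raise RuntimeError("AniList Error: Connection Failed")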
@@ -75,63 +86,31 @@ class AniList:
                 break
         return anilist_ids

-    def _top_rated(self, limit):
-        query = """
-        query ($page: Int) {
-          Page(page: $page) {
-            pageInfo {hasNextPage}
-            media(averageScore_greater: 3, sort: SCORE_DESC, type: ANIME) {id}
-          }
-        }
-        """
-        return self._pagenation(query, limit=limit)
-
-    def _popular(self, limit):
-        query = """
-        query ($page: Int) {
-          Page(page: $page) {
-            pageInfo {hasNextPage}
-            media(popularity_greater: 1000, sort: POPULARITY_DESC, type: ANIME) {id}
-          }
-        }
-        """
-        return self._pagenation(query, limit=limit)
-
-    def _season(self, season, year, sort, limit):
-        query = """
-        query ($page: Int, $season: MediaSeason, $year: Int, $sort: [MediaSort]) {
-          Page(page: $page){
-            pageInfo {hasNextPage}
-            media(season: $season, seasonYear: $year, type: ANIME, sort: $sort){id}
-          }
-        }
-        """
-        variables = {"season": season.upper(), "year": year, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
-
-    def _genre(self, genre, sort, limit):
-        query = """
-        query ($page: Int, $genre: String, $sort: [MediaSort]) {
-          Page(page: $page){
-            pageInfo {hasNextPage}
-            media(genre: $genre, sort: $sort){id}
-          }
-        }
-        """
-        variables = {"genre": genre, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
-
-    def _tag(self, tag, sort, limit):
-        query = """
-        query ($page: Int, $tag: String, $sort: [MediaSort]) {
-          Page(page: $page){
-            pageInfo {hasNextPage}
-            media(tag: $tag, sort: $sort){id}
-          }
-        }
-        """
-        variables = {"tag": tag, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
+    def _search(self, **kwargs):
+        query_vars = "$page: Int, $sort: [MediaSort]"
+        media_vars = "sort: $sort, type: ANIME"
+        variables = {"sort": "SCORE_DESC" if kwargs['sort_by'] == "score" else "POPULARITY_DESC"}
+        for key, value in kwargs.items():
+            if key not in ["sort_by", "limit"]:
+                if "." in key:
+                    attr, mod = key.split(".")
+                else:
+                    attr = key
+                    mod = ""
+                ani_attr = attr_translation[attr] if attr in attr_translation else attr
+                final = ani_attr if attr in no_mod_searches else f"{ani_attr}_{mod_translation[mod]}"
+                if attr in ["start", "end"]:
+                    value = int(util.validate_date(value, f"anilist_search {key}", return_as="%Y%m%d"))
+                    if mod == "gte":
+                        value -= 1
+                    elif mod == "lte":
+                        value += 1
+                query_vars += f", ${final}: {search_types[ani_attr]}"
+                media_vars += f", {final}: ${final}"
+                variables[key] = value
+        query = f"query ({query_vars}) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}"
+        logger.debug(query)
+        return self._pagenation(query, limit=kwargs["limit"], variables=variables)

     def _studio(self, studio_id):
         query = """
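_search replaces the five hand-written query methods by assembling one GraphQL query from whatever search attributes were supplied: each attr.mod key is translated to an AniList argument name and declared as a typed variable. The following reproduction of that string assembly for one concrete input (with the translation tables abridged from the module constants above) shows what gets generated:

    # Reproducing the _search query assembly for kwargs {"genre": ["Action"], "score.gt": 80}.
    attr_translation = {"score": "averageScore"}
    mod_translation = {"": "in", "gt": "greater"}
    no_mod_searches = ["search", "season", "year", "adult", "min_tag_percent"]
    search_types = {"genre": "[String]", "averageScore": "Int"}

    query_vars = "$page: Int, $sort: [MediaSort]"
    media_vars = "sort: $sort, type: ANIME"
    variables = {"sort": "SCORE_DESC"}
    for key, value in {"genre": ["Action"], "score.gt": 80}.items():
        attr, mod = key.split(".") if "." in key else (key, "")
        ani_attr = attr_translation.get(attr, attr)
        final = ani_attr if attr in no_mod_searches else f"{ani_attr}_{mod_translation[mod]}"
        query_vars += f", ${final}: {search_types[ani_attr]}"
        media_vars += f", {final}: ${final}"
        variables[key] = value

    print(f"query ({query_vars}) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}")
    # query ($page: Int, $sort: [MediaSort], $genre_in: [String], $averageScore_greater: Int)
    #   {Page(page: $page){pageInfo {hasNextPage}media(sort: $sort, type: ANIME,
    #    genre_in: $genre_in, averageScore_greater: $averageScore_greater){id}}}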
@@ -177,7 +156,7 @@ class AniList:
             name = ""
             if not ignore_ids:
                 ignore_ids = [anilist_id]
-                anilist_id, name = self._validate(anilist_id)
+                anilist_id, name = self._validate_id(anilist_id)
                 anilist_ids.append(anilist_id)
             json_obj = self._request(query, {"id": anilist_id})
             edges = [media["node"]["id"] for media in json_obj["data"]["Media"]["relations"]["edges"]

@@ -194,21 +173,21 @@ class AniList:

         return anilist_ids, ignore_ids, name

-    def validate_genre(self, genre):
-        if genre.lower() in self.genres:
-            return self.genres[genre.lower()]
-        raise Failed(f"AniList Error: Genre: {genre} does not exist")
-
-    def validate_tag(self, tag):
-        if tag.lower() in self.tags:
-            return self.tags[tag.lower()]
-        raise Failed(f"AniList Error: Tag: {tag} does not exist")
+    def validate(self, name, data):
+        valid = []
+        for d in util.get_list(data):
+            data_check = d.lower().replace(" / ", "-").replace(" ", "-")
+            if data_check in self.options[name]:
+                valid.append(self.options[name][data_check])
+        if len(valid) > 0:
+            return valid
+        raise Failed(f"AniList Error: {name}: {data} does not exist\nOptions: {', '.join([v for k, v in self.options[name].items()])}")

     def validate_anilist_ids(self, anilist_ids, studio=False):
+        anilist_id_list = util.get_int_list(anilist_ids, "AniList ID")
         anilist_values = []
-        for anilist_id in anilist_ids:
-            if studio: query = "query ($id: Int) {Studio(id: $id) {name}}"
-            else: query = "query ($id: Int) {Media(id: $id) {id}}"
+        query = f"query ($id: Int) {{{'Studio(id: $id) {name}' if studio else 'Media(id: $id) {id}'}}}"
+        for anilist_id in anilist_id_list:
             try:
                 self._request(query, {"id": anilist_id})
                 anilist_values.append(anilist_id)

@@ -217,33 +196,36 @@ class AniList:
             return anilist_values
         raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}")

-    def get_items(self, method, data):
-        pretty = util.pretty_names[method] if method in util.pretty_names else method
+    def get_anilist_ids(self, method, data):
         if method == "anilist_id":
-            anilist_id, name = self._validate(data)
+            logger.info(f"Processing AniList ID: {data}")
+            anilist_id, name = self._validate_id(data)
             anilist_ids = [anilist_id]
-            logger.info(f"Processing {pretty}: ({data}) {name}")
-        elif method in ["anilist_popular", "anilist_top_rated"]:
-            anilist_ids = self._popular(data) if method == "anilist_popular" else self._top_rated(data)
-            logger.info(f"Processing {pretty}: {data} Anime")
-        elif method == "anilist_season":
-            anilist_ids = self._season(data["season"], data["year"], data["sort_by"], data["limit"])
-            logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
-        elif method == "anilist_genre":
-            anilist_ids = self._genre(data["genre"], data["sort_by"], data["limit"])
-            logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Genre: {data['genre']} sorted by {pretty_names[data['sort_by']]}")
-        elif method == "anilist_tag":
-            anilist_ids = self._tag(data["tag"], data["sort_by"], data["limit"])
-            logger.info(f"Processing {pretty}: {data['limit'] if data['limit'] > 0 else 'All'} Anime from the Tag: {data['tag']} sorted by {pretty_names[data['sort_by']]}")
-        elif method in ["anilist_studio", "anilist_relations"]:
-            if method == "anilist_studio": anilist_ids, name = self._studio(data)
-            else: anilist_ids, _, name = self._relations(data)
-            logger.info(f"Processing {pretty}: ({data}) {name} ({len(anilist_ids)} Anime)")
+        elif method == "anilist_studio":
+            anilist_ids, name = self._studio(data)
+            logger.info(f"Processing AniList Studio: ({data}) {name} ({len(anilist_ids)} Anime)")
+        elif method == "anilist_relations":
+            anilist_ids, _, name = self._relations(data)
+            logger.info(f"Processing AniList Relations: ({data}) {name} ({len(anilist_ids)} Anime)")
         else:
-            raise Failed(f"AniList Error: Method {method} not supported")
-        movie_ids, show_ids = self.config.Convert.anilist_to_ids(anilist_ids)
+            if method == "anilist_popular":
+                data = {"limit": data, "popularity.gt": 3, "sort_by": "popular"}
+            elif method == "anilist_top_rated":
+                data = {"limit": data, "score.gt": 3, "sort_by": "score"}
+            elif method not in builders:
+                raise Failed(f"AniList Error: Method {method} not supported")
+            message = f"Processing {method.replace('_', ' ').title().replace('Anilist', 'AniList')}:\nSort By: {pretty_names[data['sort_by']]}"
+            if data['limit'] > 0:
+                message += f"\nLimit: {data['limit']}"
+            for key, value in data.items():
+                if "." in key:
+                    attr, mod = key.split(".")
+                else:
+                    attr = key
+                    mod = ""
+                message += f"\n{attr.replace('_', ' ').title()} {util.mod_displays[mod]} {value}"
+            util.print_multiline(message)
+            anilist_ids = self._search(**data)
         logger.debug("")
         logger.debug(f"{len(anilist_ids)} AniList IDs Found: {anilist_ids}")
-        logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
-        logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
-        return movie_ids, show_ids
+        return anilist_ids
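The single validate method works because user input is normalized the same way the option keys were normalized when __init__ built self.options: lowercase, with " / " and spaces collapsed to hyphens. A toy illustration of that lookup (data here is hypothetical):

    options = {"Genre": {"slice-of-life": "Slice of Life", "sci-fi": "Sci-Fi"}}

    def validate(name, values):
        valid = []
        for v in values:
            key = v.lower().replace(" / ", "-").replace(" ", "-")
            if key in options[name]:
                valid.append(options[name][key])
        if valid:
            return valid
        raise ValueError(f"{name}: {values} does not exist")

    print(validate("Genre", ["Slice of Life"]))  # ['Slice of Life']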
modules/builder.py (1842 changed lines)
Diff suppressed because it is too large.
modules/cache.py (144 changed lines)

@@ -1,35 +1,39 @@
 import logging, os, random, sqlite3
 from contextlib import closing
 from datetime import datetime, timedelta
 from modules import util

 logger = logging.getLogger("Plex Meta Manager")

 class Cache:
     def __init__(self, config_path, expiration):
-        cache = f"{os.path.splitext(config_path)[0]}.cache"
-        with sqlite3.connect(cache) as connection:
+        self.cache_path = f"{os.path.splitext(config_path)[0]}.cache"
+        self.expiration = expiration
+        with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
                 cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='guid_map'")
                 if cursor.fetchone()[0] == 0:
-                    logger.info(f"Initializing cache database at {cache}")
+                    logger.info(f"Initializing cache database at {self.cache_path}")
                 else:
-                    logger.info(f"Using cache database at {cache}")
+                    logger.info(f"Using cache database at {self.cache_path}")
                 cursor.execute("DROP TABLE IF EXISTS guids")
+                cursor.execute("DROP TABLE IF EXISTS guid_map")
                 cursor.execute("DROP TABLE IF EXISTS imdb_to_tvdb_map")
                 cursor.execute("DROP TABLE IF EXISTS tmdb_to_tvdb_map")
+                cursor.execute("DROP TABLE IF EXISTS imdb_map")
                 cursor.execute(
-                    """CREATE TABLE IF NOT EXISTS guid_map (
-                    INTEGER PRIMARY KEY,
+                    """CREATE TABLE IF NOT EXISTS guids_map (
+                    key INTEGER PRIMARY KEY,
                     plex_guid TEXT UNIQUE,
                     t_id TEXT,
+                    imdb_id TEXT,
                     media_type TEXT,
                     expiration_date TEXT)"""
                 )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS imdb_to_tmdb_map (
-                    INTEGER PRIMARY KEY,
+                    key INTEGER PRIMARY KEY,
                     imdb_id TEXT UNIQUE,
                     tmdb_id TEXT,
                     media_type TEXT,
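Throughout this file the cache uses the same two-step SQLite idiom: INSERT OR IGNORE to guarantee a row exists for the unique key, then UPDATE to set its values. A self-contained sketch of that upsert pattern (table trimmed to three columns for illustration):

    import sqlite3
    from contextlib import closing

    with sqlite3.connect(":memory:") as connection:
        with closing(connection.cursor()) as cursor:
            cursor.execute("CREATE TABLE guids_map (key INTEGER PRIMARY KEY, plex_guid TEXT UNIQUE, t_id TEXT)")
            # Upsert: make sure the row exists, then set its values.
            cursor.execute("INSERT OR IGNORE INTO guids_map(plex_guid) VALUES(?)", ("plex://movie/1",))
            cursor.execute("UPDATE guids_map SET t_id = ? WHERE plex_guid = ?", ("603", "plex://movie/1"))
            cursor.execute("SELECT t_id FROM guids_map WHERE plex_guid = ?", ("plex://movie/1",))
            print(cursor.fetchone()[0])  # 603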
@@ -37,28 +41,28 @@ class Cache:
                 )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS imdb_to_tvdb_map2 (
-                    INTEGER PRIMARY KEY,
+                    key INTEGER PRIMARY KEY,
                     imdb_id TEXT UNIQUE,
                     tvdb_id TEXT,
                     expiration_date TEXT)"""
                 )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS tmdb_to_tvdb_map2 (
-                    INTEGER PRIMARY KEY,
+                    key INTEGER PRIMARY KEY,
                     tmdb_id TEXT UNIQUE,
                     tvdb_id TEXT,
                     expiration_date TEXT)"""
                 )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS letterboxd_map (
-                    INTEGER PRIMARY KEY,
+                    key INTEGER PRIMARY KEY,
                     letterboxd_id TEXT UNIQUE,
                     tmdb_id TEXT,
                     expiration_date TEXT)"""
                 )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS omdb_data (
-                    INTEGER PRIMARY KEY,
+                    key INTEGER PRIMARY KEY,
                     imdb_id TEXT UNIQUE,
                     title TEXT,
                     year INTEGER,

@@ -72,7 +76,7 @@ class Cache:
                 )
                 cursor.execute(
                     """CREATE TABLE IF NOT EXISTS anime_map (
-                    INTEGER PRIMARY KEY,
+                    key INTEGER PRIMARY KEY,
                     anidb TEXT UNIQUE,
                     anilist TEXT,
                     myanimelist TEXT,

@@ -80,41 +84,57 @@ class Cache:
                     expiration_date TEXT)"""
                 )
                 cursor.execute(
-                    """CREATE TABLE IF NOT EXISTS image_map (
-                    INTEGER PRIMARY KEY,
-                    rating_key TEXT,
-                    library TEXT,
-                    type TEXT,
-                    overlay TEXT,
-                    compare TEXT,
-                    location TEXT)"""
+                    """CREATE TABLE IF NOT EXISTS image_maps (
+                    key INTEGER PRIMARY KEY,
+                    library TEXT UNIQUE)"""
                 )
-        self.expiration = expiration
-        self.cache_path = cache
+                cursor.execute("SELECT count(name) FROM sqlite_master WHERE type='table' AND name='image_map'")
+                if cursor.fetchone()[0] > 0:
+                    cursor.execute(f"SELECT DISTINCT library FROM image_map")
+                    for library in cursor.fetchall():
+                        table_name = self.get_image_table_name(library["library"])
+                        cursor.execute(f"SELECT DISTINCT * FROM image_map WHERE library='{library['library']}'")
+                        for row in cursor.fetchall():
+                            final_table = table_name if row["type"] == "poster" else f"{table_name}_backgrounds"
+                            self.update_image_map(row["rating_key"], final_table, row["location"], row["compare"], overlay=row["overlay"])
+                    cursor.execute("DROP TABLE IF EXISTS image_map")

     def query_guid_map(self, plex_guid):
         id_to_return = None
+        imdb_id = None
         media_type = None
         expired = None
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
-                cursor.execute(f"SELECT * FROM guid_map WHERE plex_guid = ?", (plex_guid,))
+                cursor.execute(f"SELECT * FROM guids_map WHERE plex_guid = ?", (plex_guid,))
                 row = cursor.fetchone()
                 if row:
                     time_between_insertion = datetime.now() - datetime.strptime(row["expiration_date"], "%Y-%m-%d")
-                    id_to_return = row["t_id"]
+                    id_to_return = util.get_list(row["t_id"], int_list=True)
+                    imdb_id = util.get_list(row["imdb_id"])
                     media_type = row["media_type"]
                     expired = time_between_insertion.days > self.expiration
-        return id_to_return, media_type, expired
+        return id_to_return, imdb_id, media_type, expired

-    def update_guid_map(self, media_type, plex_guid, t_id, expired):
-        self._update_map("guid_map", "plex_guid", plex_guid, "t_id", t_id, expired, media_type=media_type)
+    def update_guid_map(self, plex_guid, t_id, imdb_id, expired, media_type):
+        expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))
+        with sqlite3.connect(self.cache_path) as connection:
+            connection.row_factory = sqlite3.Row
+            with closing(connection.cursor()) as cursor:
+                cursor.execute(f"INSERT OR IGNORE INTO guids_map(plex_guid) VALUES(?)", (plex_guid,))
+                if media_type is None:
+                    sql = f"UPDATE guids_map SET t_id = ?, imdb_id = ?, expiration_date = ? WHERE plex_guid = ?"
+                    cursor.execute(sql, (t_id, imdb_id, expiration_date.strftime("%Y-%m-%d"), plex_guid))
+                else:
+                    sql = f"UPDATE guids_map SET t_id = ?, imdb_id = ?, expiration_date = ?, media_type = ? WHERE plex_guid = ?"
+                    cursor.execute(sql, (t_id, imdb_id, expiration_date.strftime("%Y-%m-%d"), media_type, plex_guid))

-    def query_imdb_to_tmdb_map(self, media_type, _id, imdb=True):
+    def query_imdb_to_tmdb_map(self, _id, imdb=True, media_type=None, return_type=False):
         from_id = "imdb_id" if imdb else "tmdb_id"
         to_id = "tmdb_id" if imdb else "imdb_id"
-        return self._query_map("imdb_to_tmdb_map", _id, from_id, to_id, media_type=media_type)
+        return self._query_map("imdb_to_tmdb_map", _id, from_id, to_id, media_type=media_type, return_type=return_type)

     def update_imdb_to_tmdb_map(self, media_type, expired, imdb_id, tmdb_id):
         self._update_map("imdb_to_tmdb_map", "imdb_id", imdb_id, "tmdb_id", tmdb_id, expired, media_type=media_type)

@@ -141,9 +161,10 @@ class Cache:
     def update_letterboxd_map(self, expired, letterboxd_id, tmdb_id):
         self._update_map("letterboxd_map", "letterboxd_id", letterboxd_id, "tmdb_id", tmdb_id, expired)

-    def _query_map(self, map_name, _id, from_id, to_id, media_type=None):
+    def _query_map(self, map_name, _id, from_id, to_id, media_type=None, return_type=False):
         id_to_return = None
         expired = None
+        out_type = None
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:

@@ -157,7 +178,11 @@ class Cache:
                     time_between_insertion = datetime.now() - datetime_object
                     id_to_return = row[to_id] if to_id == "imdb_id" else int(row[to_id])
                     expired = time_between_insertion.days > self.expiration
-        return id_to_return, expired
+                    out_type = row["media_type"] if return_type else None
+        if return_type:
+            return id_to_return, out_type, expired
+        else:
+            return id_to_return, expired

     def _update_map(self, map_name, val1_name, val1, val2_name, val2, expired, media_type=None):
         expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=random.randint(1, self.expiration)))

@@ -233,30 +258,69 @@ class Cache:
                 cursor.execute("INSERT OR IGNORE INTO anime_map(anidb) VALUES(?)", (anime_ids["anidb"],))
                 cursor.execute("UPDATE anime_map SET anilist = ?, myanimelist = ?, kitsu = ?, expiration_date = ? WHERE anidb = ?", (anime_ids["anidb"], anime_ids["myanimelist"], anime_ids["kitsu"], expiration_date.strftime("%Y-%m-%d"), anime_ids["anidb"]))

-    def query_image_map_overlay(self, library, image_type, overlay):
+    def get_image_table_name(self, library):
+        table_name = None
+        with sqlite3.connect(self.cache_path) as connection:
+            connection.row_factory = sqlite3.Row
+            with closing(connection.cursor()) as cursor:
+                cursor.execute(f"SELECT * FROM image_maps WHERE library = ?", (library,))
+                row = cursor.fetchone()
+                if row and row["key"]:
+                    table_name = f"image_map_{row['key']}"
+                else:
+                    cursor.execute("INSERT OR IGNORE INTO image_maps(library) VALUES(?)", (library,))
+                    cursor.execute(f"SELECT * FROM image_maps WHERE library = ?", (library,))
+                    row = cursor.fetchone()
+                    if row and row["key"]:
+                        table_name = f"image_map_{row['key']}"
+                        cursor.execute(
+                            f"""CREATE TABLE IF NOT EXISTS {table_name} (
+                            key INTEGER PRIMARY KEY,
+                            rating_key TEXT UNIQUE,
+                            overlay TEXT,
+                            compare TEXT,
+                            location TEXT)"""
+                        )
+                        cursor.execute(
+                            f"""CREATE TABLE IF NOT EXISTS {table_name}_backgrounds (
+                            key INTEGER PRIMARY KEY,
+                            rating_key TEXT UNIQUE,
+                            overlay TEXT,
+                            compare TEXT,
+                            location TEXT)"""
+                        )
+        return table_name
+
+    def query_image_map_overlay(self, table_name, overlay):
         rks = []
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
-                cursor.execute(f"SELECT * FROM image_map WHERE overlay = ? AND library = ? AND type = ?", (overlay, library, image_type))
+                cursor.execute(f"SELECT * FROM {table_name} WHERE overlay = ?", (overlay,))
                 rows = cursor.fetchall()
                 for row in rows:
                     rks.append(int(row["rating_key"]))
         return rks

-    def query_image_map(self, rating_key, library, image_type):
+    def update_remove_overlay(self, table_name, overlay):
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
-                cursor.execute(f"SELECT * FROM image_map WHERE rating_key = ? AND library = ? AND type = ?", (rating_key, library, image_type))
+                cursor.execute(f"UPDATE {table_name} SET overlay = ? WHERE overlay = ?", ("", overlay))
+
+    def query_image_map(self, rating_key, table_name):
+        with sqlite3.connect(self.cache_path) as connection:
+            connection.row_factory = sqlite3.Row
+            with closing(connection.cursor()) as cursor:
+                cursor.execute(f"SELECT * FROM {table_name} WHERE rating_key = ?", (rating_key,))
                 row = cursor.fetchone()
                 if row and row["location"]:
-                    return row["location"], row["compare"], row["overlay"]
-                return None, None, None
+                    return row["location"], row["compare"]
+                return None, None

-    def update_image_map(self, rating_key, library, image_type, location, compare, overlay):
+    def update_image_map(self, rating_key, table_name, location, compare, overlay=""):
         with sqlite3.connect(self.cache_path) as connection:
             connection.row_factory = sqlite3.Row
             with closing(connection.cursor()) as cursor:
-                cursor.execute("INSERT OR IGNORE INTO image_map(rating_key, library, type) VALUES(?, ?, ?)", (rating_key, library, image_type))
-                cursor.execute("UPDATE image_map SET location = ?, compare = ?, overlay = ? WHERE rating_key = ? AND library = ? AND type = ?", (location, compare, overlay, rating_key, library, image_type))
+                cursor.execute(f"INSERT OR IGNORE INTO {table_name}(rating_key) VALUES(?)", (rating_key,))
+                cursor.execute(f"UPDATE {table_name} SET location = ?, compare = ?, overlay = ? WHERE rating_key = ?", (location, compare, overlay, rating_key))
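The image cache now gives each library its own pair of tables (image_map_<key> and image_map_<key>_backgrounds, keyed by the autoincrement id in image_maps) instead of filtering one shared image_map table by library and type columns. A reduced sketch of the lookup-or-create step, assuming a simplified single-table variant:

    import sqlite3
    from contextlib import closing

    def get_image_table_name(connection, library):
        """Return a per-library table name, creating the registry row on first use."""
        with closing(connection.cursor()) as cursor:
            cursor.execute("CREATE TABLE IF NOT EXISTS image_maps (key INTEGER PRIMARY KEY, library TEXT UNIQUE)")
            cursor.execute("INSERT OR IGNORE INTO image_maps(library) VALUES(?)", (library,))
            cursor.execute("SELECT key FROM image_maps WHERE library = ?", (library,))
            table_name = f"image_map_{cursor.fetchone()[0]}"
            cursor.execute(f"CREATE TABLE IF NOT EXISTS {table_name} (key INTEGER PRIMARY KEY, rating_key TEXT UNIQUE, location TEXT)")
            return table_name

    with sqlite3.connect(":memory:") as conn:
        print(get_image_table_name(conn, "Movies"))  # image_map_1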
modules/config.py

@@ -1,6 +1,7 @@
-import logging, os
+import logging, os, requests
 from datetime import datetime
-from modules import util
-from lxml import html
+from modules import util, radarr, sonarr
 from modules.anidb import AniDB
 from modules.anilist import AniList
 from modules.cache import Cache

@@ -13,39 +14,19 @@ from modules.omdb import OMDb
 from modules.plex import Plex
-from modules.radarr import Radarr
-from modules.sonarr import Sonarr
 from modules.stevenlu import StevenLu
 from modules.tautulli import Tautulli
 from modules.tmdb import TMDb
-from modules.trakttv import Trakt
+from modules.trakt import Trakt
 from modules.tvdb import TVDb
 from modules.util import Failed
-from retrying import retry
 from ruamel import yaml

 logger = logging.getLogger("Plex Meta Manager")

 sync_modes = {"append": "Only Add Items to the Collection", "sync": "Add & Remove Items from the Collection"}
-radarr_availabilities = {
-    "announced": "For Announced",
-    "cinemas": "For In Cinemas",
-    "released": "For Released",
-    "db": "For PreDB"
-}
-sonarr_monitors = {
-    "all": "Monitor all episodes except specials",
-    "future": "Monitor episodes that have not aired yet",
-    "missing": "Monitor episodes that do not have files or have not aired yet",
-    "existing": "Monitor episodes that have files or have not aired yet",
-    "pilot": "Monitor the first episode. All other episodes will be ignored",
-    "first": "Monitor all episodes of the first season. All other seasons will be ignored",
-    "latest": "Monitor all episodes of the latest season and future seasons",
-    "none": "No episodes will be monitored"
-}
-sonarr_series_types = {
-    "standard": "Episodes released with SxxEyy pattern",
-    "daily": "Episodes released daily or less frequently that use year-month-day (2017-05-25)",
-    "anime": "Episodes released using an absolute episode number"
-}
 mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"}
+library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"}

 class Config:
     def __init__(self, default_dir, config_path=None, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):

@@ -108,6 +89,7 @@ class Config:
             if "omdb" in new_config: new_config["omdb"] = new_config.pop("omdb")
             if "trakt" in new_config: new_config["trakt"] = new_config.pop("trakt")
             if "mal" in new_config: new_config["mal"] = new_config.pop("mal")
+            if "anidb" in new_config: new_config["anidb"] = new_config.pop("anidb")
             yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=ind, block_seq_indent=bsi)
             self.data = new_config
         except yaml.scanner.ScannerError as e:

@@ -183,38 +165,42 @@ class Config:
                 raise Failed(f"Config Error: {message}")
             if do_print:
                 util.print_multiline(f"Config Warning: {message}")
-                if attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
+                if data and attribute in data and data[attribute] and test_list is not None and data[attribute] not in test_list:
                     util.print_multiline(options)
             return default

-        self.general = {}
-        self.general["cache"] = check_for_attribute(self.data, "cache", parent="settings", var_type="bool", default=True)
-        self.general["cache_expiration"] = check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60)
+        self.session = requests.Session()
+
+        self.general = {
+            "cache": check_for_attribute(self.data, "cache", parent="settings", var_type="bool", default=True),
+            "cache_expiration": check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60),
+            "asset_directory": check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")]),
+            "asset_folders": check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True),
+            "assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False),
+            "sync_mode": check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes),
+            "run_again_delay": check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0),
+            "show_unmanaged": check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True),
+            "show_filtered": check_for_attribute(self.data, "show_filtered", parent="settings", var_type="bool", default=False),
+            "show_missing": check_for_attribute(self.data, "show_missing", parent="settings", var_type="bool", default=True),
+            "save_missing": check_for_attribute(self.data, "save_missing", parent="settings", var_type="bool", default=True),
+            "missing_only_released": check_for_attribute(self.data, "missing_only_released", parent="settings", var_type="bool", default=False),
+            "create_asset_folders": check_for_attribute(self.data, "create_asset_folders", parent="settings", var_type="bool", default=False)
+        }
         if self.general["cache"]:
             util.separator()
             self.Cache = Cache(self.config_path, self.general["cache_expiration"])
         else:
             self.Cache = None
-        self.general["asset_directory"] = check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")])
-        self.general["asset_folders"] = check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True)
-        self.general["assets_for_all"] = check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False)
-        self.general["sync_mode"] = check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes)
-        self.general["run_again_delay"] = check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0)
-        self.general["show_unmanaged"] = check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True)
-        self.general["show_filtered"] = check_for_attribute(self.data, "show_filtered", parent="settings", var_type="bool", default=False)
-        self.general["show_missing"] = check_for_attribute(self.data, "show_missing", parent="settings", var_type="bool", default=True)
-        self.general["save_missing"] = check_for_attribute(self.data, "save_missing", parent="settings", var_type="bool", default=True)

         util.separator()

         self.TMDb = None
         if "tmdb" in self.data:
             logger.info("Connecting to TMDb...")
-            self.tmdb = {}
-            try: self.tmdb["apikey"] = check_for_attribute(self.data, "apikey", parent="tmdb", throw=True)
-            except Failed as e: raise Failed(e)
-            self.tmdb["language"] = check_for_attribute(self.data, "language", parent="tmdb", default="en")
-            self.TMDb = TMDb(self, self.tmdb)
+            self.TMDb = TMDb(self, {
+                "apikey": check_for_attribute(self.data, "apikey", parent="tmdb", throw=True),
+                "language": check_for_attribute(self.data, "language", parent="tmdb", default="en")
+            })
             logger.info(f"TMDb Connection {'Failed' if self.TMDb is None else 'Successful'}")
         else:
             raise Failed("Config Error: tmdb attribute not found")
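The "data and attribute in data" guard above means check_for_attribute can safely be called with parent="settings" even when a library block has no settings key at all; combined with a default, that is what lets the per-library settings later in this file collapse from if/else pairs into single calls. A cut-down sketch of that lookup-with-default behavior (a hypothetical simplification of the real helper):

    def check_for_attribute(data, attribute, parent=None, default=None):
        if parent is not None:
            data = data[parent] if data and parent in data else None
        if data and attribute in data and data[attribute] is not None:
            return data[attribute]
        return default

    lib = {"library_name": "Movies"}  # no "settings" block at all
    print(check_for_attribute(lib, "sync_mode", parent="settings", default="append"))  # append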
@ -224,10 +210,8 @@ class Config:
|
|||
self.OMDb = None
|
||||
if "omdb" in self.data:
|
||||
logger.info("Connecting to OMDb...")
|
||||
self.omdb = {}
|
||||
try:
|
||||
self.omdb["apikey"] = check_for_attribute(self.data, "apikey", parent="omdb", throw=True)
|
||||
self.OMDb = OMDb(self.omdb, Cache=self.Cache)
|
||||
self.OMDb = OMDb(self, {"apikey": check_for_attribute(self.data, "apikey", parent="omdb", throw=True)})
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
logger.info(f"OMDb Connection {'Failed' if self.OMDb is None else 'Successful'}")
|
||||
|
@ -239,13 +223,13 @@ class Config:
|
|||
self.Trakt = None
|
||||
if "trakt" in self.data:
|
||||
logger.info("Connecting to Trakt...")
|
||||
self.trakt = {}
|
||||
try:
|
||||
self.trakt["client_id"] = check_for_attribute(self.data, "client_id", parent="trakt", throw=True)
|
||||
self.trakt["client_secret"] = check_for_attribute(self.data, "client_secret", parent="trakt", throw=True)
|
||||
self.trakt["config_path"] = self.config_path
|
||||
authorization = self.data["trakt"]["authorization"] if "authorization" in self.data["trakt"] and self.data["trakt"]["authorization"] else None
|
||||
self.Trakt = Trakt(self.trakt, authorization)
|
||||
self.Trakt = Trakt(self, {
|
||||
"client_id": check_for_attribute(self.data, "client_id", parent="trakt", throw=True),
|
||||
"client_secret": check_for_attribute(self.data, "client_secret", parent="trakt", throw=True),
|
||||
"config_path": self.config_path,
|
||||
"authorization": self.data["trakt"]["authorization"] if "authorization" in self.data["trakt"] else None
|
||||
})
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
logger.info(f"Trakt Connection {'Failed' if self.Trakt is None else 'Successful'}")
|
||||
|
@ -257,67 +241,87 @@ class Config:
|
|||
self.MyAnimeList = None
|
||||
if "mal" in self.data:
|
||||
logger.info("Connecting to My Anime List...")
|
||||
self.mal = {}
|
||||
try:
|
||||
self.mal["client_id"] = check_for_attribute(self.data, "client_id", parent="mal", throw=True)
|
||||
self.mal["client_secret"] = check_for_attribute(self.data, "client_secret", parent="mal", throw=True)
|
||||
self.mal["config_path"] = self.config_path
|
||||
authorization = self.data["mal"]["authorization"] if "authorization" in self.data["mal"] and self.data["mal"]["authorization"] else None
|
||||
self.MyAnimeList = MyAnimeList(self.mal, self, authorization)
|
||||
self.MyAnimeList = MyAnimeList(self, {
|
||||
"client_id": check_for_attribute(self.data, "client_id", parent="mal", throw=True),
|
||||
"client_secret": check_for_attribute(self.data, "client_secret", parent="mal", throw=True),
|
||||
"config_path": self.config_path,
|
||||
"authorization": self.data["mal"]["authorization"] if "authorization" in self.data["mal"] else None
|
||||
})
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}")
|
||||
else:
|
||||
logger.warning("mal attribute not found")
|
||||
|
||||
util.separator()
|
||||
|
||||
self.AniDB = None
|
||||
if "anidb" in self.data:
|
||||
util.separator()
|
||||
logger.info("Connecting to AniDB...")
|
||||
try:
|
||||
self.AniDB = AniDB(self, {
|
||||
"username": check_for_attribute(self.data, "username", parent="anidb", throw=True),
|
||||
"password": check_for_attribute(self.data, "password", parent="anidb", throw=True)
|
||||
})
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
logger.info(f"My Anime List Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}")
|
||||
if self.AniDB is None:
|
||||
self.AniDB = AniDB(self, None)
|
||||
|
||||
self.TVDb = TVDb(self)
|
||||
self.IMDb = IMDb(self)
|
||||
self.AniDB = AniDB(self)
|
||||
self.Convert = Convert(self)
|
||||
self.AniList = AniList(self)
|
||||
self.Letterboxd = Letterboxd(self)
|
||||
self.ICheckMovies = ICheckMovies(self)
|
||||
self.StevenLu = StevenLu(self)
|
||||
|
||||
util.separator()
|
||||
|
||||
logger.info("Connecting to Plex Libraries...")
|
||||
|
||||
self.general["plex"] = {}
|
||||
self.general["plex"]["url"] = check_for_attribute(self.data, "url", parent="plex", var_type="url", default_is_none=True)
|
||||
self.general["plex"]["token"] = check_for_attribute(self.data, "token", parent="plex", default_is_none=True)
|
||||
self.general["plex"]["timeout"] = check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60)
|
||||
self.general["plex"]["clean_bundles"] = check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False)
|
||||
self.general["plex"]["empty_trash"] = check_for_attribute(self.data, "empty_trash", parent="plex", var_type="bool", default=False)
|
||||
self.general["plex"]["optimize"] = check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False)
|
||||
|
||||
self.general["radarr"] = {}
|
||||
self.general["radarr"]["url"] = check_for_attribute(self.data, "url", parent="radarr", var_type="url", default_is_none=True)
|
||||
self.general["radarr"]["token"] = check_for_attribute(self.data, "token", parent="radarr", default_is_none=True)
|
||||
self.general["radarr"]["add"] = check_for_attribute(self.data, "add", parent="radarr", var_type="bool", default=False)
|
||||
self.general["radarr"]["root_folder_path"] = check_for_attribute(self.data, "root_folder_path", parent="radarr", default_is_none=True)
|
||||
self.general["radarr"]["monitor"] = check_for_attribute(self.data, "monitor", parent="radarr", var_type="bool", default=True)
|
||||
self.general["radarr"]["availability"] = check_for_attribute(self.data, "availability", parent="radarr", test_list=radarr_availabilities, default="announced")
|
||||
self.general["radarr"]["quality_profile"] = check_for_attribute(self.data, "quality_profile", parent="radarr", default_is_none=True)
|
||||
self.general["radarr"]["tag"] = check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True)
|
||||
self.general["radarr"]["search"] = check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
|
||||
|
||||
self.general["sonarr"] = {}
|
||||
self.general["sonarr"]["url"] = check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True)
|
||||
self.general["sonarr"]["token"] = check_for_attribute(self.data, "token", parent="sonarr", default_is_none=True)
|
||||
self.general["sonarr"]["add"] = check_for_attribute(self.data, "add", parent="sonarr", var_type="bool", default=False)
|
||||
self.general["sonarr"]["root_folder_path"] = check_for_attribute(self.data, "root_folder_path", parent="sonarr", default_is_none=True)
|
||||
self.general["sonarr"]["monitor"] = check_for_attribute(self.data, "monitor", parent="sonarr", test_list=sonarr_monitors, default="all")
|
||||
self.general["sonarr"]["quality_profile"] = check_for_attribute(self.data, "quality_profile", parent="sonarr", default_is_none=True)
|
||||
self.general["sonarr"]["language_profile"] = check_for_attribute(self.data, "language_profile", parent="sonarr", default_is_none=True)
|
||||
self.general["sonarr"]["series_type"] = check_for_attribute(self.data, "series_type", parent="sonarr", test_list=sonarr_series_types, default="standard")
|
||||
self.general["sonarr"]["season_folder"] = check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True)
|
||||
self.general["sonarr"]["tag"] = check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True)
|
||||
self.general["sonarr"]["search"] = check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False)
|
||||
self.general["sonarr"]["cutoff_search"] = check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False)
|
||||
|
||||
self.general["tautulli"] = {}
|
||||
self.general["tautulli"]["url"] = check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True)
|
||||
self.general["tautulli"]["apikey"] = check_for_attribute(self.data, "apikey", parent="tautulli", default_is_none=True)
|
||||
self.general["plex"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="plex", var_type="url", default_is_none=True),
|
||||
"token": check_for_attribute(self.data, "token", parent="plex", default_is_none=True),
|
||||
"timeout": check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60),
|
||||
"clean_bundles": check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False),
|
||||
"empty_trash": check_for_attribute(self.data, "empty_trash", parent="plex", var_type="bool", default=False),
|
||||
"optimize": check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False)
|
||||
}
|
||||
self.general["radarr"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="radarr", var_type="url", default_is_none=True),
|
||||
"token": check_for_attribute(self.data, "token", parent="radarr", default_is_none=True),
|
||||
"add": check_for_attribute(self.data, "add", parent="radarr", var_type="bool", default=False),
|
||||
"add_existing": check_for_attribute(self.data, "add_existing", parent="radarr", var_type="bool", default=False),
|
||||
"root_folder_path": check_for_attribute(self.data, "root_folder_path", parent="radarr", default_is_none=True),
|
||||
"monitor": check_for_attribute(self.data, "monitor", parent="radarr", var_type="bool", default=True),
|
||||
"availability": check_for_attribute(self.data, "availability", parent="radarr", test_list=radarr.availability_descriptions, default="announced"),
|
||||
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="radarr", default_is_none=True),
|
||||
"tag": check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True),
|
||||
"search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
|
||||
}
|
||||
self.general["sonarr"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True),
|
||||
"token": check_for_attribute(self.data, "token", parent="sonarr", default_is_none=True),
|
||||
"add": check_for_attribute(self.data, "add", parent="sonarr", var_type="bool", default=False),
|
||||
"add_existing": check_for_attribute(self.data, "add_existing", parent="sonarr", var_type="bool", default=False),
|
||||
"root_folder_path": check_for_attribute(self.data, "root_folder_path", parent="sonarr", default_is_none=True),
|
||||
"monitor": check_for_attribute(self.data, "monitor", parent="sonarr", test_list=sonarr.monitor_descriptions, default="all"),
|
||||
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="sonarr", default_is_none=True),
|
||||
"language_profile": check_for_attribute(self.data, "language_profile", parent="sonarr", default_is_none=True),
|
||||
"series_type": check_for_attribute(self.data, "series_type", parent="sonarr", test_list=sonarr.series_type_descriptions, default="standard"),
|
||||
"season_folder": check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True),
|
||||
"tag": check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True),
|
||||
"search": check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False),
|
||||
"cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False)
|
||||
}
|
||||
self.general["tautulli"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True),
|
||||
"apikey": check_for_attribute(self.data, "apikey", parent="tautulli", default_is_none=True)
|
||||
}
|
||||
|
||||
self.libraries = []
|
||||
libs = check_for_attribute(self.data, "libraries", throw=True)
|
||||
|
@ -326,14 +330,11 @@ class Config:
|
|||
if self.requested_libraries and library_name not in self.requested_libraries:
|
||||
continue
|
||||
util.separator()
|
||||
params = {}
|
||||
params["mapping_name"] = str(library_name)
|
||||
if lib and "library_name" in lib and lib["library_name"]:
|
||||
params["name"] = str(lib["library_name"])
|
||||
display_name = f"{params['name']} ({params['mapping_name']})"
|
||||
else:
|
||||
params["name"] = params["mapping_name"]
|
||||
display_name = params["mapping_name"]
|
||||
params = {
|
||||
"mapping_name": str(library_name),
|
||||
"name": str(lib["library_name"]) if lib and "library_name" in lib and lib["library_name"] else str(library_name)
|
||||
}
|
||||
display_name = f"{params['name']} ({params['mapping_name']})" if lib and "library_name" in lib and lib["library_name"] else params["mapping_name"]
|
||||
|
||||
util.separator(f"{display_name} Configuration")
|
||||
logger.info("")
|
||||
|
@ -343,79 +344,39 @@ class Config:
if params["asset_directory"] is None:
logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library")

if lib and "settings" in lib and lib["settings"] and "asset_folders" in lib["settings"]:
params["asset_folders"] = check_for_attribute(lib, "asset_folders", parent="settings", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
else:
params["asset_folders"] = check_for_attribute(lib, "asset_folders", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
params["asset_folders"] = check_for_attribute(lib, "asset_folders", parent="settings", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", parent="settings", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
params["sync_mode"] = check_for_attribute(lib, "sync_mode", parent="settings", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
params["show_filtered"] = check_for_attribute(lib, "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
params["show_missing"] = check_for_attribute(lib, "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
params["missing_only_released"] = check_for_attribute(lib, "missing_only_released", parent="settings", var_type="bool", default=self.general["missing_only_released"], do_print=False, save=False)
params["create_asset_folders"] = check_for_attribute(lib, "create_asset_folders", parent="settings", var_type="bool", default=self.general["create_asset_folders"], do_print=False, save=False)

if lib and "settings" in lib and lib["settings"] and "assets_for_all" in lib["settings"]:
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", parent="settings", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
else:
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)

if lib and "settings" in lib and lib["settings"] and "sync_mode" in lib["settings"]:
params["sync_mode"] = check_for_attribute(lib, "sync_mode", parent="settings", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)
else:
params["sync_mode"] = check_for_attribute(lib, "sync_mode", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)

if lib and "settings" in lib and lib["settings"] and "show_unmanaged" in lib["settings"]:
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
else:
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)

if lib and "settings" in lib and lib["settings"] and "show_filtered" in lib["settings"]:
params["show_filtered"] = check_for_attribute(lib, "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
else:
params["show_filtered"] = check_for_attribute(lib, "show_filtered", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)

if lib and "settings" in lib and lib["settings"] and "show_missing" in lib["settings"]:
params["show_missing"] = check_for_attribute(lib, "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
else:
params["show_missing"] = check_for_attribute(lib, "show_missing", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)

if lib and "settings" in lib and lib["settings"] and "save_missing" in lib["settings"]:
params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
else:
params["save_missing"] = check_for_attribute(lib, "save_missing", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)

if lib and "mass_genre_update" in lib and lib["mass_genre_update"]:
params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False)
if self.OMDb is None and params["mass_genre_update"] == "omdb":
params["mass_genre_update"] = None
logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection")
else:
params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_genre_update" in lib)
if self.OMDb is None and params["mass_genre_update"] == "omdb":
params["mass_genre_update"] = None
logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection")

if lib and "mass_audience_rating_update" in lib and lib["mass_audience_rating_update"]:
params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False)
if self.OMDb is None and params["mass_audience_rating_update"] == "omdb":
params["mass_audience_rating_update"] = None
logger.error("Config Error: mass_audience_rating_update cannot be omdb without a successful OMDb Connection")
else:
params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_audience_rating_update" in lib)
if self.OMDb is None and params["mass_audience_rating_update"] == "omdb":
params["mass_audience_rating_update"] = None
logger.error("Config Error: mass_audience_rating_update cannot be omdb without a successful OMDb Connection")

if lib and "mass_critic_rating_update" in lib and lib["mass_critic_rating_update"]:
params["mass_critic_rating_update"] = check_for_attribute(lib, "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False)
if self.OMDb is None and params["mass_critic_rating_update"] == "omdb":
params["mass_critic_rating_update"] = None
logger.error("Config Error: mass_critic_rating_update cannot be omdb without a successful OMDb Connection")
else:
params["mass_critic_rating_update"] = check_for_attribute(lib, "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_audience_rating_update" in lib)
if self.OMDb is None and params["mass_critic_rating_update"] == "omdb":
params["mass_critic_rating_update"] = None
logger.error("Config Error: mass_critic_rating_update cannot be omdb without a successful OMDb Connection")

if lib and "split_duplicates" in lib and lib["split_duplicates"]:
params["split_duplicates"] = check_for_attribute(lib, "split_duplicates", var_type="bool", default=False, save=False)
else:
params["split_duplicates"] = None
params["mass_trakt_rating_update"] = check_for_attribute(lib, "mass_trakt_rating_update", var_type="bool", default=False, save=False, do_print=lib and "mass_trakt_rating_update" in lib)
if self.Trakt is None and params["mass_trakt_rating_update"]:
params["mass_trakt_rating_update"] = None
logger.error("Config Error: mass_trakt_rating_update cannot run without a successful Trakt Connection")

if lib and "radarr_add_all" in lib and lib["radarr_add_all"]:
params["radarr_add_all"] = check_for_attribute(lib, "radarr_add_all", var_type="bool", default=False, save=False)
else:
params["radarr_add_all"] = None

if lib and "sonarr_add_all" in lib and lib["sonarr_add_all"]:
params["sonarr_add_all"] = check_for_attribute(lib, "sonarr_add_all", var_type="bool", default=False, save=False)
else:
params["sonarr_add_all"] = None
params["split_duplicates"] = check_for_attribute(lib, "split_duplicates", var_type="bool", default=False, save=False, do_print=lib and "split_duplicates" in lib)
params["radarr_add_all"] = check_for_attribute(lib, "radarr_add_all", var_type="bool", default=False, save=False, do_print=lib and "radarr_add_all" in lib)
params["sonarr_add_all"] = check_for_attribute(lib, "sonarr_add_all", var_type="bool", default=False, save=False, do_print=lib and "sonarr_add_all" in lib)

try:
if lib and "metadata_path" in lib:
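Aside: the collapsed single-call form above only works because check_for_attribute resolves the settings parent itself. A minimal sketch of that assumed fallback order (an illustration, not the project's actual helper):

def check_for_attribute_sketch(data, attribute, parent=None, default=None):
    data = data if isinstance(data, dict) else {}  # lib can be None
    # 1) look under the parent block, e.g. lib["settings"]["sync_mode"]
    if parent and isinstance(data.get(parent), dict) and attribute in data[parent]:
        return data[parent][attribute]
    # 2) fall back to the attribute at the top level of the library block
    if attribute in data:
        return data[attribute]
    # 3) finally fall back to the global default
    return default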
@ -425,33 +386,29 @@ class Config:
paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]]
for path in paths_to_check:
if isinstance(path, dict):
if "url" in path:
if path["url"] is None:
logger.error("Config Error: metadata_path url is blank")
else:
params["metadata_path"].append(("URL", path["url"]))
if "git" in path:
if path["git"] is None:
logger.error("Config Error: metadata_path git is blank")
else:
params["metadata_path"].append(("Git", path['git']))
if "file" in path:
if path["file"] is None:
logger.error("Config Error: metadata_path file is blank")
else:
params["metadata_path"].append(("File", path['file']))
def check_dict(attr, name):
if attr in path:
if path[attr] is None:
logger.error(f"Config Error: metadata_path {attr} is blank")
else:
params["metadata_path"].append((name, path[attr]))
check_dict("url", "URL")
check_dict("git", "Git")
check_dict("file", "File")
check_dict("folder", "Folder")
else:
params["metadata_path"].append(("File", path))
else:
params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"))]
params["default_dir"] = default_dir
params["plex"] = {}
params["plex"]["url"] = check_for_attribute(lib, "url", parent="plex", var_type="url", default=self.general["plex"]["url"], req_default=True, save=False)
params["plex"]["token"] = check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False)
params["plex"]["timeout"] = check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False)
params["plex"]["clean_bundles"] = check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False)
params["plex"]["empty_trash"] = check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False)
params["plex"]["optimize"] = check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
params["plex"] = {
"url": check_for_attribute(lib, "url", parent="plex", var_type="url", default=self.general["plex"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False),
"timeout": check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False),
"clean_bundles": check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False),
"empty_trash": check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False),
"optimize": check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
}
library = Plex(self, params)
logger.info("")
logger.info(f"{display_name} Library Connection Successful")
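The check_dict closure above replaces four copy-pasted url/git/file branches with one parameterized helper; adding the new folder source then costs a single call. A standalone sketch mirroring the diff (the sample path dict is hypothetical):

params = {"metadata_path": []}
path = {"git": "meisnate12/MovieCharts"}  # hypothetical metadata_path entry

def check_dict(attr, name):
    if attr in path:
        if path[attr] is None:
            print(f"Config Error: metadata_path {attr} is blank")
        else:
            params["metadata_path"].append((name, path[attr]))

check_dict("url", "URL")
check_dict("git", "Git")
check_dict("file", "File")
check_dict("folder", "Folder")
# params["metadata_path"] -> [("Git", "meisnate12/MovieCharts")]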
@ -467,18 +424,19 @@ class Config:
logger.info("")
logger.info(f"Connecting to {display_name} library's Radarr...")
logger.info("")
radarr_params = {}
try:
radarr_params["url"] = check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False)
radarr_params["token"] = check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False)
radarr_params["add"] = check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False)
radarr_params["root_folder_path"] = check_for_attribute(lib, "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False)
radarr_params["monitor"] = check_for_attribute(lib, "monitor", parent="radarr", var_type="bool", default=self.general["radarr"]["monitor"], save=False)
radarr_params["availability"] = check_for_attribute(lib, "availability", parent="radarr", test_list=radarr_availabilities, default=self.general["radarr"]["availability"], save=False)
radarr_params["quality_profile"] = check_for_attribute(lib, "quality_profile", parent="radarr", default=self.general["radarr"]["quality_profile"], req_default=True, save=False)
radarr_params["tag"] = check_for_attribute(lib, "search", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False)
radarr_params["search"] = check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
library.Radarr = Radarr(radarr_params)
library.Radarr = Radarr(self, {
"url": check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False),
"add": check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False),
"add_existing": check_for_attribute(lib, "add_existing", parent="radarr", var_type="bool", default=self.general["radarr"]["add_existing"], save=False),
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False),
"monitor": check_for_attribute(lib, "monitor", parent="radarr", var_type="bool", default=self.general["radarr"]["monitor"], save=False),
"availability": check_for_attribute(lib, "availability", parent="radarr", test_list=radarr.availability_descriptions, default=self.general["radarr"]["availability"], save=False),
"quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr",default=self.general["radarr"]["quality_profile"], req_default=True, save=False),
"tag": check_for_attribute(lib, "tag", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False),
"search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
})
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
@ -491,24 +449,22 @@ class Config:
logger.info("")
logger.info(f"Connecting to {display_name} library's Sonarr...")
logger.info("")
sonarr_params = {}
try:
sonarr_params["url"] = check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False)
sonarr_params["token"] = check_for_attribute(lib, "token", parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False)
sonarr_params["add"] = check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False)
sonarr_params["root_folder_path"] = check_for_attribute(lib, "root_folder_path", parent="sonarr", default=self.general["sonarr"]["root_folder_path"], req_default=True, save=False)
sonarr_params["monitor"] = check_for_attribute(lib, "monitor", parent="sonarr", test_list=sonarr_monitors, default=self.general["sonarr"]["monitor"], save=False)
sonarr_params["quality_profile"] = check_for_attribute(lib, "quality_profile", parent="sonarr", default=self.general["sonarr"]["quality_profile"], req_default=True, save=False)
if self.general["sonarr"]["language_profile"]:
sonarr_params["language_profile"] = check_for_attribute(lib, "language_profile", parent="sonarr", default=self.general["sonarr"]["language_profile"], save=False)
else:
sonarr_params["language_profile"] = check_for_attribute(lib, "language_profile", parent="sonarr", default_is_none=True, save=False)
sonarr_params["series_type"] = check_for_attribute(lib, "series_type", parent="sonarr", test_list=sonarr_series_types, default=self.general["sonarr"]["series_type"], save=False)
sonarr_params["season_folder"] = check_for_attribute(lib, "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False)
sonarr_params["tag"] = check_for_attribute(lib, "search", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False)
sonarr_params["search"] = check_for_attribute(lib, "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False)
sonarr_params["cutoff_search"] = check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False)
library.Sonarr = Sonarr(sonarr_params)
library.Sonarr = Sonarr(self, {
"url": check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False),
"add": check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False),
"add_existing": check_for_attribute(lib, "add_existing", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add_existing"], save=False),
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="sonarr", default=self.general["sonarr"]["root_folder_path"], req_default=True, save=False),
"monitor": check_for_attribute(lib, "monitor", parent="sonarr", test_list=sonarr.monitor_descriptions, default=self.general["sonarr"]["monitor"], save=False),
"quality_profile": check_for_attribute(lib, "quality_profile", parent="sonarr", default=self.general["sonarr"]["quality_profile"], req_default=True, save=False),
"language_profile": check_for_attribute(lib, "language_profile", parent="sonarr", default=self.general["sonarr"]["language_profile"], save=False) if self.general["sonarr"]["language_profile"] else check_for_attribute(lib, "language_profile", parent="sonarr", default_is_none=True, save=False),
"series_type": check_for_attribute(lib, "series_type", parent="sonarr", test_list=sonarr.series_type_descriptions, default=self.general["sonarr"]["series_type"], save=False),
"season_folder": check_for_attribute(lib, "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False),
"tag": check_for_attribute(lib, "tag", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False),
"search": check_for_attribute(lib, "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False),
"cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False)
})
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
@ -521,11 +477,11 @@ class Config:
logger.info("")
logger.info(f"Connecting to {display_name} library's Tautulli...")
logger.info("")
tautulli_params = {}
try:
tautulli_params["url"] = check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False)
tautulli_params["apikey"] = check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
library.Tautulli = Tautulli(tautulli_params)
library.Tautulli = Tautulli(self, {
"url": check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False),
"apikey": check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
})
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
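All three connector blocks now share one shape: build the params dict inline and pass the Config object first, so each service can reuse the shared retry-wrapped session instead of creating its own. A sketch of what such a constructor presumably stores (attribute names are assumptions for illustration):

class TautulliSketch:
    def __init__(self, config, params):
        self.config = config            # exposes config.get_json() and friends
        self.url = params["url"]        # assumed attribute names
        self.apikey = params["apikey"]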
@ -544,3 +500,22 @@ class Config:

util.separator()

def get_html(self, url, headers=None, params=None):
return html.fromstring(self.get(url, headers=headers, params=params).content)

def get_json(self, url, headers=None):
return self.get(url, headers=headers).json()

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get(self, url, headers=None, params=None):
return self.session.get(url, headers=headers, params=params)

def post_html(self, url, data=None, json=None, headers=None):
return html.fromstring(self.post(url, data=data, json=json, headers=headers).content)

def post_json(self, url, data=None, json=None, headers=None):
return self.post(url, data=data, json=json, headers=headers).json()

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def post(self, url, data=None, json=None, headers=None):
return self.session.post(url, data=data, json=json, headers=headers)
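These helpers funnel every outbound request through one requests.Session with a single retry policy (six attempts, ten seconds apart), which is what lets the per-module _request() methods below be deleted. A hedged usage sketch, assuming a built Config instance named config (the XPath is illustrative only, not from the commit):

headers = {"Accept-Language": "en-US,en;q=0.5", "User-Agent": "Mozilla/5.0 x64"}
doc = config.get_html("https://anidb.net/latest/anime/popular/?h=1", headers=headers)
titles = doc.xpath("//td/a/text()")  # illustrative XPath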
@ -1,41 +1,43 @@
import logging, re, requests
from lxml import html
from modules import util
from modules.util import Failed
from plexapi.exceptions import BadRequest
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

arms_url = "https://relations.yuna.moe/api/ids"
anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"

class Convert:
def __init__(self, config):
self.config = config
self.arms_url = "https://relations.yuna.moe/api/ids"
self.anidb_url = "https://raw.githubusercontent.com/Anime-Lists/anime-lists/master/anime-list-master.xml"
self.AniDBIDs = self._get_anidb()
self.AniDBIDs = self.config.get_html(anidb_url)

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _get_anidb(self):
return html.fromstring(requests.get(self.anidb_url).content)

def _anidb(self, input_id, to_id, fail=False):
ids = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{input_id}')]/@{to_id}")
if len(ids) > 0:
try:
if len(ids[0]) > 0:
return util.get_list(ids[0]) if to_id == "imdbid" else int(ids[0])
raise ValueError
except ValueError:
fail_text = f"Convert Error: No {util.pretty_ids[to_id]} ID found for AniDB ID: {input_id}"
def _anidb(self, anidb_id, fail=False):
tvdbid = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{anidb_id}')]/@tvdbid")
imdbid = self.AniDBIDs.xpath(f"//anime[contains(@anidbid, '{anidb_id}')]/@imdbid")
if len(tvdbid) > 0:
if len(imdbid[0]) > 0:
imdb_ids = util.get_list(imdbid[0])
tmdb_ids = []
for imdb in imdb_ids:
tmdb_id, tmdb_type = self.imdb_to_tmdb(imdb)
if tmdb_id and tmdb_type == "movie":
tmdb_ids.append(tmdb_id)
if tmdb_ids:
return None, imdb_ids, tmdb_ids
else:
fail_text = f"Convert Error: No TMDb ID found for AniDB ID: {anidb_id}"
else:
try:
return int(tvdbid[0]), [], []
except ValueError:
fail_text = f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}"
else:
fail_text = f"Convert Error: AniDB ID: {input_id} not found"
fail_text = f"Convert Error: AniDB ID: {anidb_id} not found"
if fail:
raise Failed(fail_text)
return [] if to_id == "imdbid" else None

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, ids):
return requests.post(self.arms_url, json=ids).json()
return None, [], []

def _arms_ids(self, anilist_ids=None, anidb_ids=None, mal_ids=None):
all_ids = []
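The rewritten _anidb() folds the old per-field lookups into one call that always answers in three parts. Based on the returns above, a consumer sees one of three shapes (the convert variable is an assumed Convert instance):

# (tvdb_id, [], [])           -> series match via TVDb
# (None, imdb_ids, tmdb_ids)  -> movie match via IMDb/TMDb
# (None, [], [])              -> no match (only when fail=False)
tvdb_id, imdb_ids, tmdb_ids = convert._anidb(69, fail=False)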
@ -68,7 +70,7 @@ class Convert:
if len(unconverted_ids) > 0:
unconverted_id_sets.append(unconverted_ids)
for unconverted_id_set in unconverted_id_sets:
for anime_ids in self._request(unconverted_id_set):
for anime_ids in self.config.post_json(arms_url, json=unconverted_id_set):
if anime_ids:
if self.config.Cache:
self.config.Cache.update_anime_map(False, anime_ids)
@ -76,24 +78,17 @@ class Convert:
return converted_ids

def anidb_to_ids(self, anidb_list):
show_ids = []
movie_ids = []
ids = []
for anidb_id in anidb_list:
imdb_ids = self.anidb_to_imdb(anidb_id)
tmdb_ids = []
if imdb_ids:
for imdb_id in imdb_ids:
tmdb_id = self.imdb_to_tmdb(imdb_id)
if tmdb_id:
tmdb_ids.append(tmdb_id)
tvdb_id = self.anidb_to_tvdb(anidb_id)
if tvdb_id:
show_ids.append(tvdb_id)
if tmdb_ids:
movie_ids.extend(tmdb_ids)
if not tvdb_id and not tmdb_ids:
logger.error(f"Convert Error: No TVDb ID or IMDb ID found for AniDB ID: {anidb_id}")
return movie_ids, show_ids
try:
tvdb_id, _, tmdb_ids = self._anidb(anidb_id, fail=True)
if tvdb_id:
ids.append((tvdb_id, "tvdb"))
if tmdb_ids:
ids.extend([(t, "tmdb") for t in tmdb_ids])
except Failed as e:
logger.error(e)
return ids

def anilist_to_ids(self, anilist_ids):
anidb_ids = []
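anidb_to_ids() now returns a single flat list of (id, source) pairs instead of separate movie and show lists, leaving the split to the caller. A small sketch of that split (the helper name is hypothetical):

def split_ids(ids):
    # ids like [(121361, "tvdb"), (603, "tmdb")]
    movie_ids = [i for i, source in ids if source == "tmdb"]
    show_ids = [i for i, source in ids if source == "tvdb"]
    return movie_ids, show_ids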
@ -113,55 +108,44 @@ class Convert:
logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {id_set['myanimelist']}")
return self.anidb_to_ids(anidb_ids)

def anidb_to_tvdb(self, anidb_id, fail=False):
return self._anidb(anidb_id, "tvdbid", fail=fail)

def anidb_to_imdb(self, anidb_id, fail=False):
return self._anidb(anidb_id, "imdbid", fail=fail)

def tmdb_to_imdb(self, tmdb_id, is_movie=True, fail=False):
media_type = "movie" if is_movie else "show"
expired = False
if self.config.Cache and is_movie:
cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(media_type, tmdb_id, imdb=False)
cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(tmdb_id, imdb=False, media_type=media_type)
if cache_id and not expired:
return cache_id
imdb_id = None
try:
imdb_id = self.config.TMDb.convert_from(tmdb_id, "imdb_id", is_movie)
if imdb_id:
if self.config.Cache:
self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id)
return imdb_id
except Failed:
if self.config.Trakt:
try:
imdb_id = self.config.Trakt.convert(tmdb_id, "tmdb", "imdb", "movie" if is_movie else "show")
except Failed:
pass
if fail and imdb_id is None:
pass
if fail:
raise Failed(f"Convert Error: No IMDb ID Found for TMDb ID: {tmdb_id}")
if self.config.Cache and imdb_id:
self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id)
return imdb_id
else:
return None

def imdb_to_tmdb(self, imdb_id, is_movie=True, fail=False):
media_type = "movie" if is_movie else "show"
def imdb_to_tmdb(self, imdb_id, fail=False):
expired = False
if self.config.Cache and is_movie:
cache_id, expired = self.config.Cache.query_imdb_to_tmdb_map(media_type, imdb_id, imdb=True)
if self.config.Cache:
cache_id, cache_type, expired = self.config.Cache.query_imdb_to_tmdb_map(imdb_id, imdb=True, return_type=True)
if cache_id and not expired:
return cache_id
tmdb_id = None
return cache_id, cache_type
try:
tmdb_id = self.config.TMDb.convert_to(imdb_id, "imdb_id", is_movie)
tmdb_id, tmdb_type = self.config.TMDb.convert_imdb_to(imdb_id)
if tmdb_id:
if self.config.Cache:
self.config.Cache.update_imdb_to_tmdb_map(tmdb_type, expired, imdb_id, tmdb_id)
return tmdb_id, tmdb_type
except Failed:
if self.config.Trakt:
try:
tmdb_id = self.config.Trakt.convert(imdb_id, "imdb", "tmdb", media_type)
except Failed:
pass
if fail and tmdb_id is None:
pass
if fail:
raise Failed(f"Convert Error: No TMDb ID Found for IMDb ID: {imdb_id}")
if self.config.Cache and tmdb_id:
self.config.Cache.update_imdb_to_tmdb_map(media_type, expired, imdb_id, tmdb_id)
return tmdb_id
else:
return None, None

def tmdb_to_tvdb(self, tmdb_id, fail=False):
expired = False
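Each converter above follows the same cache-first shape: consult the cache, skip the network while the row is fresh, and write back after a successful lookup so expired rows get refreshed. A condensed sketch of the pattern with stand-in cache/lookup objects (not the real Cache API):

def cached_lookup(cache, lookup, key):
    expired = False
    if cache:
        hit, expired = cache.query(key)    # stand-in for the query_* methods
        if hit and not expired:
            return hit                     # fresh row, no request made
    value = lookup(key)                    # network call; may raise Failed
    if cache and value:
        cache.update(expired, key, value)  # stand-in for the update_* methods
    return value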
@ -169,20 +153,18 @@ class Convert:
cache_id, expired = self.config.Cache.query_tmdb_to_tvdb_map(tmdb_id, tmdb=True)
if cache_id and not expired:
return cache_id
tvdb_id = None
try:
tvdb_id = self.config.TMDb.convert_from(tmdb_id, "tvdb_id", False)
if tvdb_id:
if self.config.Cache:
self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id)
return tvdb_id
except Failed:
if self.config.Trakt:
try:
tvdb_id = self.config.Trakt.convert(tmdb_id, "tmdb", "tvdb", "show")
except Failed:
pass
if fail and tvdb_id is None:
pass
if fail:
raise Failed(f"Convert Error: No TVDb ID Found for TMDb ID: {tmdb_id}")
if self.config.Cache and tvdb_id:
self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id)
return tvdb_id
else:
return None

def tvdb_to_tmdb(self, tvdb_id, fail=False):
expired = False
@ -190,20 +172,18 @@ class Convert:
cache_id, expired = self.config.Cache.query_tmdb_to_tvdb_map(tvdb_id, tmdb=False)
if cache_id and not expired:
return cache_id
tmdb_id = None
try:
tmdb_id = self.config.TMDb.convert_to(tvdb_id, "tvdb_id", False)
tmdb_id = self.config.TMDb.convert_tvdb_to(tvdb_id)
if tmdb_id:
if self.config.Cache:
self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id)
return tmdb_id
except Failed:
if self.config.Trakt:
try:
tmdb_id = self.config.Trakt.convert(tvdb_id, "tvdb", "tmdb", "show")
except Failed:
pass
if fail and tmdb_id is None:
pass
if fail:
raise Failed(f"Convert Error: No TMDb ID Found for TVDb ID: {tvdb_id}")
if self.config.Cache and tmdb_id:
self.config.Cache.update_tmdb_to_tvdb_map(expired, tmdb_id, tvdb_id)
return tmdb_id
else:
return None

def tvdb_to_imdb(self, tvdb_id, fail=False):
expired = False
@ -211,20 +191,18 @@ class Convert:
cache_id, expired = self.config.Cache.query_imdb_to_tvdb_map(tvdb_id, imdb=False)
if cache_id and not expired:
return cache_id
imdb_id = None
try:
imdb_id = self.tmdb_to_imdb(self.tvdb_to_tmdb(tvdb_id, fail=True), is_movie=False, fail=True)
if imdb_id:
if self.config.Cache:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return imdb_id
except Failed:
if self.config.Trakt:
try:
imdb_id = self.config.Trakt.convert(tvdb_id, "tvdb", "imdb", "show")
except Failed:
pass
if fail and imdb_id is None:
pass
if fail:
raise Failed(f"Convert Error: No IMDb ID Found for TVDb ID: {tvdb_id}")
if self.config.Cache and imdb_id:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return imdb_id
else:
return None

def imdb_to_tvdb(self, imdb_id, fail=False):
expired = False
@ -232,41 +210,38 @@ class Convert:
cache_id, expired = self.config.Cache.query_imdb_to_tvdb_map(imdb_id, imdb=True)
if cache_id and not expired:
return cache_id
tvdb_id = None
try:
tvdb_id = self.tmdb_to_tvdb(self.imdb_to_tmdb(imdb_id, is_movie=False, fail=True), fail=True)
tmdb_id, tmdb_type = self.imdb_to_tmdb(imdb_id, fail=True)
if tmdb_type == "show":
tvdb_id = self.tmdb_to_tvdb(tmdb_id, fail=True)
if tvdb_id:
if self.config.Cache:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return tvdb_id
except Failed:
if self.config.Trakt:
try:
tvdb_id = self.config.Trakt.convert(imdb_id, "imdb", "tvdb", "show")
except Failed:
pass
if fail and tvdb_id is None:
pass
if fail:
raise Failed(f"Convert Error: No TVDb ID Found for IMDb ID: {imdb_id}")
if self.config.Cache and tvdb_id:
self.config.Cache.update_imdb_to_tvdb_map(expired, imdb_id, tvdb_id)
return tvdb_id
else:
return None

def get_id(self, item, library):
expired = None
tmdb_id = []
tvdb_id = []
imdb_id = []
anidb_id = None
if self.config.Cache:
cache_id, media_type, expired = self.config.Cache.query_guid_map(item.guid)
cache_id, imdb_check, media_type, expired = self.config.Cache.query_guid_map(item.guid)
if cache_id and not expired:
media_id_type = "movie" if "movie" in media_type else "show"
return media_id_type, util.get_list(cache_id, int_list=True)
return media_id_type, cache_id, imdb_check
try:
tmdb_id = None
imdb_id = None
tvdb_id = None
anidb_id = None
guid = requests.utils.urlparse(item.guid)
item_type = guid.scheme.split(".")[-1]
check_id = guid.netloc

if item_type == "plex":
tmdb_id = []
imdb_id = []
tvdb_id = []
try:
for guid_tag in library.get_guids(item):
url_parsed = requests.utils.urlparse(guid_tag.id)
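For reference, the guid parsing above relies on requests.utils.urlparse, which is a re-export of urllib.parse.urlparse. A worked example with a legacy agent guid:

import requests

guid = requests.utils.urlparse("com.plexapp.agents.thetvdb://121361?lang=en")
item_type = guid.scheme.split(".")[-1]  # "thetvdb"
check_id = guid.netloc                  # "121361"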
@ -278,12 +253,13 @@ class Convert:
util.print_stacktrace()
raise Failed("No External GUIDs found")
if not tvdb_id and not imdb_id and not tmdb_id:
library.query(item.refresh)
raise Failed("Refresh Metadata")
elif item_type == "imdb": imdb_id = check_id
elif item_type == "thetvdb": tvdb_id = int(check_id)
elif item_type == "themoviedb": tmdb_id = int(check_id)
elif item_type == "imdb": imdb_id.append(check_id)
elif item_type == "thetvdb": tvdb_id.append(int(check_id))
elif item_type == "themoviedb": tmdb_id.append(int(check_id))
elif item_type == "hama":
if check_id.startswith("tvdb"): tvdb_id = int(re.search("-(.*)", check_id).group(1))
if check_id.startswith("tvdb"): tvdb_id.append(int(re.search("-(.*)", check_id).group(1)))
elif check_id.startswith("anidb"): anidb_id = re.search("-(.*)", check_id).group(1)
else: raise Failed(f"Hama Agent ID: {check_id} not supported")
elif item_type == "myanimelist":
@ -294,72 +270,63 @@ class Convert:
else: raise Failed(f"Agent {item_type} not supported")

if anidb_id:
tvdb_id = self.anidb_to_tvdb(anidb_id)
if not tvdb_id:
imdb_id = self.anidb_to_imdb(anidb_id)
if not imdb_id and not tvdb_id:
raise Failed(f"Unable to convert AniDB ID: {anidb_id} to TVDb ID or IMDb ID")

if not tmdb_id and imdb_id:
if isinstance(imdb_id, list):
tmdb_id = []
ani_tvdb, ani_imdb, ani_tmdb = self._anidb(anidb_id, fail=True)
if ani_imdb:
imdb_id.extend(ani_imdb)
if ani_tmdb:
tmdb_id.extend(ani_tmdb)
if ani_tvdb:
tvdb_id.append(ani_tvdb)
else:
if not tmdb_id and imdb_id:
for imdb in imdb_id:
try:
tmdb_id.append(self.imdb_to_tmdb(imdb, fail=True))
except Failed:
continue
else:
tmdb_id = self.imdb_to_tmdb(imdb_id)
if not tmdb_id:
raise Failed(f"Unable to convert IMDb ID: {util.compile_list(imdb_id)} to TMDb ID")
if not anidb_id and not tvdb_id and tmdb_id and library.is_show:
if isinstance(tmdb_id, list):
tvdb_id = []
tmdb, tmdb_type = self.imdb_to_tmdb(imdb)
if tmdb and ((tmdb_type == "movie" and library.is_movie) or (tmdb_type == "show" and library.is_show)):
tmdb_id.append(tmdb)

if not imdb_id and tmdb_id and library.is_movie:
for tmdb in tmdb_id:
try:
tvdb_id.append(self.tmdb_to_tvdb(tmdb, fail=True))
except Failed:
continue
else:
tvdb_id = self.tmdb_to_tvdb(tmdb_id)
if not tvdb_id:
raise Failed(f"Unable to convert TMDb ID: {util.compile_list(tmdb_id)} to TVDb ID")
imdb = self.tmdb_to_imdb(tmdb)
if imdb:
imdb_id.append(imdb)

if tvdb_id:
if isinstance(tvdb_id, list):
new_tvdb_id = []
for tvdb in tvdb_id:
try:
new_tvdb_id.append(int(tvdb))
except ValueError:
continue
tvdb_id = new_tvdb_id
else:
try:
tvdb_id = int(tvdb_id)
except ValueError:
tvdb_id = None
if not tvdb_id and tmdb_id and library.is_show:
for tmdb in tmdb_id:
tvdb = self.tmdb_to_tvdb(tmdb)
if tvdb:
tvdb_id.append(tvdb)
if not tvdb_id:
raise Failed(f"Unable to convert TMDb ID: {', '.join([str(t) for t in tmdb_id])} to TVDb ID")

def update_cache(cache_ids, id_type, guid_type):
if not imdb_id and tvdb_id:
for tvdb in tvdb_id:
imdb = self.tvdb_to_imdb(tvdb)
if imdb:
imdb_id.append(imdb)

def update_cache(cache_ids, id_type, imdb_in, guid_type):
if self.config.Cache:
cache_ids = util.compile_list(cache_ids)
logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {item.guid:<46} | {id_type} ID: {cache_ids:<6} | {item.title}"))
self.config.Cache.update_guid_map(guid_type, item.guid, cache_ids, expired)
cache_ids = ",".join([str(c) for c in cache_ids])
imdb_in = ",".join([str(i) for i in imdb_in]) if imdb_in else None
ids = f"{item.guid:<46} | {id_type} ID: {cache_ids:<7} | IMDb ID: {str(imdb_in):<10}"
logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}"))
self.config.Cache.update_guid_map(item.guid, cache_ids, imdb_in, expired, guid_type)

if tmdb_id and library.is_movie:
update_cache(tmdb_id, "TMDb", "movie")
return "movie", tmdb_id
update_cache(tmdb_id, "TMDb", imdb_id, "movie")
return "movie", tmdb_id, imdb_id
elif tvdb_id and library.is_show:
update_cache(tvdb_id, "TVDb", "show")
return "show", tvdb_id
update_cache(tvdb_id, "TVDb", imdb_id, "show")
return "show", tvdb_id, imdb_id
elif anidb_id and tmdb_id and library.is_show:
update_cache(tmdb_id, "TMDb", "show_movie")
return "movie", tmdb_id
update_cache(tmdb_id, "TMDb", imdb_id, "show_movie")
return "movie", tmdb_id, imdb_id
else:
logger.debug(f"TMDb: {tmdb_id}, IMDb: {imdb_id}, TVDb: {tvdb_id}")
raise Failed(f"No ID to convert")
except Failed as e:
logger.info(util.adjust_space(f"Mapping Error | {item.guid:<46} | {e} for {item.title}"))
except BadRequest:
util.print_stacktrace()
logger.info(util.adjust_space(f"Mapping Error | {item.guid:<46} | Bad Request for {item.title}"))
return None, None
return None, None, None
@ -1,55 +1,42 @@
import logging, requests
from lxml import html
import logging
from modules import util
from modules.util import Failed
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

builders = ["icheckmovies_list", "icheckmovies_list_details"]
base_url = "https://www.icheckmovies.com/lists/"

class ICheckMovies:
def __init__(self, config):
self.config = config
self.list_url = "https://www.icheckmovies.com/lists/"

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, url, language):
return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content)
def _request(self, url, language, xpath):
return self.config.get_html(url, headers=util.header(language)).xpath(xpath)

def _parse_list(self, list_url, language):
response = self._request(list_url, language)
imdb_urls = response.xpath("//a[@class='optionIcon optionIMDB external']/@href")
return [t[t.find("/tt") + 1:-1] for t in imdb_urls]
imdb_urls = self._request(list_url, language, "//a[@class='optionIcon optionIMDB external']/@href")
return [(t[t.find("/tt") + 1:-1], "imdb") for t in imdb_urls]

def get_list_description(self, list_url, language):
descriptions = self._request(list_url, language).xpath("//div[@class='span-19 last']/p/em/text()")
descriptions = self._request(list_url, language, "//div[@class='span-19 last']/p/em/text()")
return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None

def validate_icheckmovies_list(self, list_url, language):
list_url = list_url.strip()
if not list_url.startswith(self.list_url):
raise Failed(f"ICheckMovies Error: {list_url} must begin with: {self.list_url}")
if len(self._parse_list(list_url, language)) > 0:
return list_url
raise Failed(f"ICheckMovies Error: {list_url} failed to parse")
def validate_icheckmovies_lists(self, icheckmovies_lists, language):
valid_lists = []
for icheckmovies_list in util.get_list(icheckmovies_lists, split=False):
list_url = icheckmovies_list.strip()
if not list_url.startswith(base_url):
raise Failed(f"ICheckMovies Error: {list_url} must begin with: {base_url}")
elif len(self._parse_list(list_url, language)) > 0:
valid_lists.append(list_url)
else:
raise Failed(f"ICheckMovies Error: {list_url} failed to parse")
return valid_lists

def get_items(self, method, data, language):
pretty = util.pretty_names[method] if method in util.pretty_names else method
movie_ids = []
def get_icheckmovies_ids(self, method, data, language):
if method == "icheckmovies_list":
logger.info(f"Processing {pretty}: {data}")
imdb_ids = self._parse_list(data, language)
total_ids = len(imdb_ids)
for i, imdb_id in enumerate(imdb_ids, 1):
try:
util.print_return(f"Converting IMDb ID {i}/{total_ids}")
movie_ids.append(self.config.Convert.imdb_to_tmdb(imdb_id))
except Failed as e:
logger.error(e)
logger.info(util.adjust_space(f"Processed {total_ids} IMDb IDs"))
logger.info(f"Processing ICheckMovies List: {data}")
return self._parse_list(data, language)
else:
raise Failed(f"ICheckMovies Error: Method {method} not supported")
logger.debug("")
logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
return movie_ids, []
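After the rename, the builder hands back raw (imdb_id, "imdb") pairs and leaves conversion to the caller. A hedged usage sketch, assuming a configured ICheckMovies instance icm and an illustrative list URL:

lists = icm.validate_icheckmovies_lists(["https://www.icheckmovies.com/lists/imdb+top+250/"], "en-US,en;q=0.5")
ids = icm.get_icheckmovies_ids("icheckmovies_list", lists[0], "en-US,en;q=0.5")
# ids -> [("tt0111161", "imdb"), ...]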
170 modules/imdb.py
@ -1,125 +1,107 @@
import logging, math, re, requests
from lxml import html
import logging, math, re, time
from modules import util
from modules.util import Failed
from retrying import retry
from urllib.parse import urlparse, parse_qs

logger = logging.getLogger("Plex Meta Manager")

builders = ["imdb_list", "imdb_id"]
base_url = "https://www.imdb.com"
urls = {
"list": f"{base_url}/list/ls",
"search": f"{base_url}/search/title/",
"keyword": f"{base_url}/search/keyword/"
}
xpath = {
"imdb_id": "//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst",
"list": "//div[@class='desc lister-total-num-results']/text()",
"search": "//div[@class='desc']/span/text()",
"keyword": "//div[@class='desc']/text()"
}
item_counts = {"list": 100, "search": 250, "keyword": 50}

class IMDb:
def __init__(self, config):
self.config = config
self.urls = {
"list": "https://www.imdb.com/list/ls",
"search": "https://www.imdb.com/search/title/?",
"keyword": "https://www.imdb.com/search/keyword/?"
}

def validate_imdb_url(self, imdb_url, language):
imdb_url = imdb_url.strip()
if not imdb_url.startswith(self.urls["list"]) and not imdb_url.startswith(self.urls["search"]) and not imdb_url.startswith(self.urls["keyword"]):
raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{self.urls['list']} (For Lists)\n{self.urls['search']} (For Searches)\n{self.urls['keyword']} (For Keyword Searches)")
total, _ = self._total(self._fix_url(imdb_url), language)
if total > 0:
return imdb_url
raise Failed(f"IMDb Error: {imdb_url} failed to parse")

def _fix_url(self, imdb_url):
if imdb_url.startswith(self.urls["list"]):
try: list_id = re.search("(\\d+)", str(imdb_url)).group(1)
except AttributeError: raise Failed(f"IMDb Error: Failed to parse List ID from {imdb_url}")
return f"{self.urls['search']}lists=ls{list_id}"
elif imdb_url.endswith("/"):
return imdb_url[:-1]
else:
return imdb_url
def validate_imdb_lists(self, imdb_lists, language):
valid_lists = []
for imdb_dict in util.get_list(imdb_lists, split=False):
if not isinstance(imdb_dict, dict):
imdb_dict = {"url": imdb_dict}
dict_methods = {dm.lower(): dm for dm in imdb_dict}
imdb_url = util.parse("url", imdb_dict, methods=dict_methods, parent="imdb_list").strip()
if not imdb_url.startswith((urls["list"], urls["search"], urls["keyword"])):
raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{urls['list']} (For Lists)\n{urls['search']} (For Searches)\n{urls['keyword']} (For Keyword Searches)")
self._total(imdb_url, language)
list_count = util.parse("limit", imdb_dict, datatype="int", methods=dict_methods, default=0, parent="imdb_list", minimum=0) if "limit" in dict_methods else 0
valid_lists.append({"url": imdb_url, "limit": list_count})
return valid_lists

def _total(self, imdb_url, language):
header = {"Accept-Language": language}
if imdb_url.startswith(self.urls["keyword"]):
results = self._request(imdb_url, header).xpath("//div[@class='desc']/text()")
total = None
for result in results:
if "title" in result:
try:
total = int(re.findall("(\\d+) title", result)[0])
break
except IndexError:
pass
if total is None:
raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
return total, 50
headers = util.header(language)
if imdb_url.startswith(urls["keyword"]):
page_type = "keyword"
elif imdb_url.startswith(urls["list"]):
page_type = "list"
else:
try: results = self._request(imdb_url, header).xpath("//div[@class='desc']/span/text()")[0].replace(",", "")
except IndexError: raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}")
try: total = int(re.findall("(\\d+) title", results)[0])
except IndexError: raise Failed(f"IMDb Error: No Results at URL: {imdb_url}")
return total, 250
page_type = "search"
results = self.config.get_html(imdb_url, headers=headers).xpath(xpath[page_type])
total = 0
for result in results:
if "title" in result:
try:
total = int(re.findall("(\\d+) title", result.replace(",", ""))[0])
break
except IndexError:
pass
if total > 0:
return total, item_counts[page_type]
raise ValueError(f"IMDb Error: Failed to parse URL: {imdb_url}")

def _ids_from_url(self, imdb_url, language, limit):
current_url = self._fix_url(imdb_url)
total, item_count = self._total(current_url, language)
header = {"Accept-Language": language}
total, item_count = self._total(imdb_url, language)
headers = util.header(language)
imdb_ids = []
if "&start=" in current_url: current_url = re.sub("&start=\\d+", "", current_url)
if "&count=" in current_url: current_url = re.sub("&count=\\d+", "", current_url)
if "&page=" in current_url: current_url = re.sub("&page=\\d+", "", current_url)
if limit < 1 or total < limit: limit = total
parsed_url = urlparse(imdb_url)
params = parse_qs(parsed_url.query)
imdb_base = parsed_url._replace(query=None).geturl()
params.pop("start", None)
params.pop("count", None)
params.pop("page", None)

if limit < 1 or total < limit:
limit = total
remainder = limit % item_count
if remainder == 0: remainder = item_count
if remainder == 0:
remainder = item_count
num_of_pages = math.ceil(int(limit) / item_count)
for i in range(1, num_of_pages + 1):
start_num = (i - 1) * item_count + 1
util.print_return(f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}")
if imdb_url.startswith(self.urls["keyword"]):
response = self._request(f"{current_url}&page={i}", header)
if imdb_base.startswith((urls["list"], urls["keyword"])):
params["page"] = i
else:
response = self._request(f"{current_url}&count={remainder if i == num_of_pages else item_count}&start={start_num}", header)
if imdb_url.startswith(self.urls["keyword"]) and i == num_of_pages:
imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst")[:remainder])
else:
imdb_ids.extend(response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst"))
params["count"] = remainder if i == num_of_pages else item_count
params["start"] = start_num
ids_found = self.config.get_html(imdb_base, headers=headers, params=params).xpath(xpath["imdb_id"])
if imdb_base.startswith((urls["list"], urls["keyword"])) and i == num_of_pages:
ids_found = ids_found[:remainder]
imdb_ids.extend(ids_found)
time.sleep(2)
util.print_end()
if imdb_ids: return imdb_ids
else: raise Failed(f"IMDb Error: No IMDb IDs Found at {imdb_url}")

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, url, header):
return html.fromstring(requests.get(url, headers=header).content)

def get_items(self, method, data, language, is_movie):
pretty = util.pretty_names[method] if method in util.pretty_names else method
show_ids = []
movie_ids = []
fail_ids = []
def run_convert(imdb_id):
tvdb_id = self.config.Convert.imdb_to_tvdb(imdb_id) if not is_movie else None
tmdb_id = self.config.Convert.imdb_to_tmdb(imdb_id) if tvdb_id is None else None
if tmdb_id: movie_ids.append(tmdb_id)
elif tvdb_id: show_ids.append(tvdb_id)
else:
logger.error(f"Convert Error: No {'' if is_movie else 'TVDb ID or '}TMDb ID found for IMDb: {imdb_id}")
fail_ids.append(imdb_id)
if len(imdb_ids) > 0:
logger.debug(f"{len(imdb_ids)} IMDb IDs Found: {imdb_ids}")
return imdb_ids
raise ValueError(f"IMDb Error: No IMDb IDs Found at {imdb_url}")

def get_imdb_ids(self, method, data, language):
if method == "imdb_id":
logger.info(f"Processing {pretty}: {data}")
run_convert(data)
logger.info(f"Processing IMDb ID: {data}")
return [(data, "imdb")]
elif method == "imdb_list":
status = f"{data['limit']} Items at " if data['limit'] > 0 else ''
logger.info(f"Processing {pretty}: {status}{data['url']}")
imdb_ids = self._ids_from_url(data["url"], language, data["limit"])
total_ids = len(imdb_ids)
for i, imdb in enumerate(imdb_ids, 1):
util.print_return(f"Converting IMDb ID {i}/{total_ids}")
run_convert(imdb)
logger.info(util.adjust_space(f"Processed {total_ids} IMDb IDs"))
logger.info(f"Processing IMDb List: {status}{data['url']}")
return [(i, "imdb") for i in self._ids_from_url(data["url"], language, data["limit"])]
else:
raise Failed(f"IMDb Error: Method {method} not supported")
logger.debug("")
logger.debug(f"{len(fail_ids)} IMDb IDs Failed to Convert: {fail_ids}")
logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
return movie_ids, show_ids
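The pagination rewrite stops regex-editing the query string and instead parses it once, drops the volatile paging keys, and lets requests re-encode them per page. A self-contained sketch of that step (the URL is illustrative):

from urllib.parse import urlparse, parse_qs

parsed = urlparse("https://www.imdb.com/search/title/?groups=top_250&start=51&count=50")
params = parse_qs(parsed.query)
imdb_base = parsed._replace(query=None).geturl()
for key in ("start", "count", "page"):
    params.pop(key, None)  # strip once instead of three re.sub calls
params["count"] = 250      # re-issued per page by the shared session
params["start"] = 1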
@ -1,36 +1,31 @@
import logging, requests
from lxml import html
import logging, time
from modules import util
from modules.util import Failed
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

builders = ["letterboxd_list", "letterboxd_list_details"]
base_url = "https://letterboxd.com"

class Letterboxd:
def __init__(self, config):
self.config = config
self.url = "https://letterboxd.com"

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def _request(self, url, language):
return html.fromstring(requests.get(url, headers={"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}).content)

def _parse_list(self, list_url, language):
response = self._request(list_url, language)
letterboxd_ids = response.xpath("//div[@class='poster film-poster really-lazy-load']/@data-film-id")
response = self.config.get_html(list_url, headers=util.header(language))
letterboxd_ids = response.xpath("//li[contains(@class, 'poster-container')]/div/@data-film-id")
items = []
for letterboxd_id in letterboxd_ids:
slugs = response.xpath(f"//div[@data-film-id='{letterboxd_id}']/@data-film-slug")
items.append((letterboxd_id, slugs[0]))
next_url = response.xpath("//a[@class='next']/@href")
if len(next_url) > 0:
items.extend(self._parse_list(f"{self.url}{next_url[0]}", language))
time.sleep(2)
items.extend(self._parse_list(f"{base_url}{next_url[0]}", language))
return items

def _tmdb(self, letterboxd_url, language):
response = self._request(letterboxd_url, language)
response = self.config.get_html(letterboxd_url, headers=util.header(language))
ids = response.xpath("//a[@data-track-action='TMDb']/@href")
if len(ids) > 0 and ids[0]:
if "themoviedb.org/movie" in ids[0]:
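_parse_list() now follows the list's next link recursively, sleeping two seconds between fetches. An equivalent iterative sketch (get_page is a hypothetical stand-in that fetches one page and returns its items plus the next URL, or None):

import time

def collect_all_pages(get_page, first_url, delay=2):
    items, next_url = get_page(first_url)
    while next_url:
        time.sleep(delay)  # stay polite between page fetches
        page_items, next_url = get_page(next_url)
        items.extend(page_items)
    return items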
@ -39,35 +34,48 @@ class Letterboxd:
|
|||
raise Failed(f"Letterboxd Error: TMDb Movie ID not found at {letterboxd_url}")
|
||||
|
||||
def get_list_description(self, list_url, language):
|
||||
descriptions = self._request(list_url, language).xpath("//meta[@property='og:description']/@content")
|
||||
response = self.config.get_html(list_url, headers=util.header(language))
|
||||
descriptions = response.xpath("//meta[@property='og:description']/@content")
|
||||
return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None
|
||||
|
||||
def get_items(self, method, data, language):
|
||||
pretty = util.pretty_names[method] if method in util.pretty_names else method
|
||||
movie_ids = []
|
||||
logger.info(f"Processing {pretty}: {data}")
|
||||
items = self._parse_list(data, language)
|
||||
total_items = len(items)
|
||||
if total_items > 0:
|
||||
for i, item in enumerate(items, 1):
|
||||
letterboxd_id, slug = item
|
||||
util.print_return(f"Finding TMDb ID {i}/{total_items}")
|
||||
tmdb_id = None
|
||||
expired = None
|
||||
if self.config.Cache:
|
||||
tmdb_id, expired = self.config.Cache.query_letterboxd_map(letterboxd_id)
|
||||
if not tmdb_id or expired is not False:
|
||||
try:
|
||||
tmdb_id = self._tmdb(f"{self.url}{slug}", language)
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
continue
|
||||
def validate_letterboxd_lists(self, letterboxd_lists, language):
|
||||
valid_lists = []
|
||||
for letterboxd_list in util.get_list(letterboxd_lists, split=False):
|
||||
list_url = letterboxd_list.strip()
|
||||
if not list_url.startswith(base_url):
|
||||
raise Failed(f"Letterboxd Error: {list_url} must begin with: {base_url}")
|
||||
elif len(self._parse_list(list_url, language)) > 0:
|
||||
valid_lists.append(list_url)
|
||||
else:
|
||||
raise Failed(f"Letterboxd Error: {list_url} failed to parse")
|
||||
return valid_lists
|
||||
|
||||
def get_tmdb_ids(self, method, data, language):
|
||||
if method == "letterboxd_list":
|
||||
logger.info(f"Processing Letterboxd List: {data}")
|
||||
items = self._parse_list(data, language)
|
||||
total_items = len(items)
|
||||
if total_items > 0:
|
||||
ids = []
|
||||
for i, item in enumerate(items, 1):
|
||||
letterboxd_id, slug = item
|
||||
util.print_return(f"Finding TMDb ID {i}/{total_items}")
|
||||
tmdb_id = None
|
||||
expired = None
|
||||
if self.config.Cache:
|
||||
self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id)
|
||||
movie_ids.append(tmdb_id)
|
||||
logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs"))
|
||||
tmdb_id, expired = self.config.Cache.query_letterboxd_map(letterboxd_id)
|
||||
if not tmdb_id or expired is not False:
|
||||
try:
|
||||
tmdb_id = self._tmdb(f"{base_url}{slug}", language)
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
continue
|
||||
if self.config.Cache:
|
||||
self.config.Cache.update_letterboxd_map(expired, letterboxd_id, tmdb_id)
|
||||
ids.append((tmdb_id, "tmdb"))
|
||||
logger.info(util.adjust_space(f"Processed {total_items} TMDb IDs"))
|
||||
return ids
|
||||
else:
|
||||
raise Failed(f"Letterboxd Error: No List Items found in {data}")
|
||||
else:
|
||||
logger.error(f"Letterboxd Error: No List Items found in {data}")
|
||||
logger.debug("")
|
||||
logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
|
||||
return movie_ids, []
|
||||
raise Failed(f"Letterboxd Error: Method {method} not supported")
|
||||
|
|
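The Letterboxd scrape above pages through a list by following the "next" link and now sleeps two seconds between pages. A minimal standalone sketch of that pagination (the XPath expressions are the ones in the diff; parse_list and the session argument are illustrative, not part of the module):

import time
from lxml import html
import requests

BASE_URL = "https://letterboxd.com"

def parse_list(session, list_url, language):
    # Fetch one page of a Letterboxd list and collect (film_id, slug) pairs.
    page = html.fromstring(session.get(list_url, headers={"Accept-Language": language}).content)
    items = [
        (film_id, page.xpath(f"//div[@data-film-id='{film_id}']/@data-film-slug")[0])
        for film_id in page.xpath("//li[contains(@class, 'poster-container')]/div/@data-film-id")
    ]
    next_url = page.xpath("//a[@class='next']/@href")
    if next_url:
        time.sleep(2)  # pause between pages so the scrape stays polite
        items.extend(parse_list(session, f"{BASE_URL}{next_url[0]}", language))
    return items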
199
modules/mal.py
@@ -1,99 +1,64 @@
import logging, re, requests, secrets, webbrowser
import logging, math, re, secrets, time, webbrowser
from modules import util
from modules.util import Failed, TimeoutExpired
from retrying import retry
from ruamel import yaml

logger = logging.getLogger("Plex Meta Manager")

builders = [
    "mal_id",
    "mal_all",
    "mal_airing",
    "mal_upcoming",
    "mal_tv",
    "mal_ova",
    "mal_movie",
    "mal_special",
    "mal_popular",
    "mal_favorite",
    "mal_season",
    "mal_suggested",
    "mal_userlist"
    "mal_id", "mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_ova", "mal_movie", "mal_special",
    "mal_popular", "mal_favorite", "mal_season", "mal_suggested", "mal_userlist", "mal_genre", "mal_producer"
]
mal_ranked_name = {
    "mal_all": "all",
    "mal_airing": "airing",
    "mal_upcoming": "upcoming",
    "mal_tv": "tv",
    "mal_ova": "ova",
    "mal_movie": "movie",
    "mal_special": "special",
    "mal_popular": "bypopularity",
    "mal_favorite": "favorite"
    "mal_all": "all", "mal_airing": "airing", "mal_upcoming": "upcoming", "mal_tv": "tv", "mal_ova": "ova",
    "mal_movie": "movie", "mal_special": "special", "mal_popular": "bypopularity", "mal_favorite": "favorite"
}
season_sort = {
    "anime_score": "anime_score",
    "anime_num_list_users": "anime_num_list_users",
    "score": "anime_score",
    "members": "anime_num_list_users"
mal_ranked_pretty = {
    "mal_all": "MyAnimeList All", "mal_airing": "MyAnimeList Airing",
    "mal_upcoming": "MyAnimeList Upcoming", "mal_tv": "MyAnimeList TV", "mal_ova": "MyAnimeList OVA",
    "mal_movie": "MyAnimeList Movie", "mal_special": "MyAnimeList Special", "mal_popular": "MyAnimeList Popular",
    "mal_favorite": "MyAnimeList Favorite", "mal_genre": "MyAnimeList Genre", "mal_producer": "MyAnimeList Producer"
}
season_sort_translation = {"score": "anime_score", "anime_score": "anime_score", "members": "anime_num_list_users", "anime_num_list_users": "anime_num_list_users"}
season_sort_options = ["score", "members"]
pretty_names = {
    "anime_score": "Score",
    "anime_num_list_users": "Members",
    "list_score": "Score",
    "list_updated_at": "Last Updated",
    "anime_title": "Title",
    "anime_start_date": "Start Date",
    "all": "All Anime",
    "watching": "Currently Watching",
    "completed": "Completed",
    "on_hold": "On Hold",
    "dropped": "Dropped",
    "plan_to_watch": "Plan to Watch"
    "anime_score": "Score", "list_score": "Score", "anime_num_list_users": "Members", "list_updated_at": "Last Updated",
    "anime_title": "Title", "anime_start_date": "Start Date", "all": "All Anime", "watching": "Currently Watching",
    "completed": "Completed", "on_hold": "On Hold", "dropped": "Dropped", "plan_to_watch": "Plan to Watch"
}
userlist_sort = {
    "score": "list_score",
    "list_score": "list_score",
    "last_updated": "list_updated_at",
    "list_updated": "list_updated_at",
    "list_updated_at": "list_updated_at",
    "title": "anime_title",
    "anime_title": "anime_title",
    "start_date": "anime_start_date",
    "anime_start_date": "anime_start_date"
userlist_sort_translation = {
    "score": "list_score", "list_score": "list_score",
    "last_updated": "list_updated_at", "list_updated": "list_updated_at", "list_updated_at": "list_updated_at",
    "title": "anime_title", "anime_title": "anime_title",
    "start_date": "anime_start_date", "anime_start_date": "anime_start_date"
}
userlist_sort_options = ["score", "last_updated", "title", "start_date"]
userlist_status = ["all", "watching", "completed", "on_hold", "dropped", "plan_to_watch"]
base_url = "https://api.myanimelist.net"
jiken_base_url = "https://api.jikan.moe/v3"
urls = {
    "oauth_token": f"https://myanimelist.net/v1/oauth2/token",
    "oauth_authorize": f"https://myanimelist.net/v1/oauth2/authorize",
    "ranking": f"{base_url}/v2/anime/ranking",
    "season": f"{base_url}/v2/anime/season",
    "suggestions": f"{base_url}/v2/anime/suggestions",
    "user": f"{base_url}/v2/users"
}
userlist_status = [
    "all",
    "watching",
    "completed",
    "on_hold",
    "dropped",
    "plan_to_watch"
]

class MyAnimeList:
    def __init__(self, params, config, authorization=None):
    def __init__(self, config, params):
        self.config = config
        self.urls = {
            "oauth_token": "https://myanimelist.net/v1/oauth2/token",
            "oauth_authorize": "https://myanimelist.net/v1/oauth2/authorize",
            "ranking": "https://api.myanimelist.net/v2/anime/ranking",
            "season": "https://api.myanimelist.net/v2/anime/season",
            "suggestions": "https://api.myanimelist.net/v2/anime/suggestions",
            "user": "https://api.myanimelist.net/v2/users"
        }
        self.client_id = params["client_id"]
        self.client_secret = params["client_secret"]
        self.config_path = params["config_path"]
        self.authorization = authorization
        self.authorization = params["authorization"]
        if not self._save(self.authorization):
            if not self._refresh():
                self._authorization()

    def _authorization(self):
        code_verifier = secrets.token_urlsafe(100)[:128]
        url = f"{self.urls['oauth_authorize']}?response_type=code&client_id={self.client_id}&code_challenge={code_verifier}"
        url = f"{urls['oauth_authorize']}?response_type=code&client_id={self.client_id}&code_challenge={code_verifier}"
        logger.info("")
        logger.info(f"Navigate to: {url}")
        logger.info("")
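A note on the OAuth flow above: the code_verifier doubles as the code_challenge because MyAnimeList uses PKCE's "plain" method, so the same string generated for the authorize URL must be sent back in the token exchange. A minimal sketch of both steps, assuming the user pastes back the ?code= value from the redirect (the endpoint URLs and field names follow MyAnimeList's documented v1 OAuth API; the function names are illustrative):

import secrets, requests

code_verifier = secrets.token_urlsafe(100)[:128]  # also serves as the "plain" code_challenge

def authorize_url(client_id):
    return (f"https://myanimelist.net/v1/oauth2/authorize?response_type=code"
            f"&client_id={client_id}&code_challenge={code_verifier}")

def exchange_code(client_id, client_secret, code):
    # Exchange the ?code= value from the redirect for access/refresh tokens.
    return requests.post("https://myanimelist.net/v1/oauth2/token", data={
        "client_id": client_id,
        "client_secret": client_secret,
        "code": code,
        "code_verifier": code_verifier,  # same string sent as the challenge
        "grant_type": "authorization_code",
    }).json()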
@@ -122,7 +87,7 @@

    def _check(self, authorization):
        try:
            self._request(self.urls["suggestions"], authorization=authorization)
            self._request(urls["suggestions"], authorization=authorization)
            return True
        except Failed as e:
            logger.debug(e)

@@ -158,63 +123,115 @@
            return True
        return False

    @retry(stop_max_attempt_number=6, wait_fixed=10000)
    def _oauth(self, data):
        return requests.post(self.urls["oauth_token"], data).json()
        return self.config.post_json(urls["oauth_token"], data=data)

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
    def _request(self, url, authorization=None):
        new_authorization = authorization if authorization else self.authorization
        response = requests.get(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"}).json()
        response = self.config.get_json(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"})
        if "error" in response: raise Failed(f"MyAnimeList Error: {response['error']}")
        else: return response

    def _jiken_request(self, url):
        data = self.config.get_json(f"{jiken_base_url}{url}")
        time.sleep(2)
        return data

    def _parse_request(self, url):
        data = self._request(url)
        return [d["node"]["id"] for d in data["data"]] if "data" in data else []

    def _username(self):
        return self._request(f"{self.urls['user']}/@me")["name"]
        return self._request(f"{urls['user']}/@me")["name"]

    def _ranked(self, ranking_type, limit):
        url = f"{self.urls['ranking']}?ranking_type={ranking_type}&limit={limit}"
        url = f"{urls['ranking']}?ranking_type={ranking_type}&limit={limit}"
        return self._parse_request(url)

    def _season(self, season, year, sort_by, limit):
        url = f"{self.urls['season']}/{year}/{season}?sort={sort_by}&limit={limit}"
        url = f"{urls['season']}/{year}/{season}?sort={sort_by}&limit={limit}"
        return self._parse_request(url)

    def _suggestions(self, limit):
        url = f"{self.urls['suggestions']}?limit={limit}"
        url = f"{urls['suggestions']}?limit={limit}"
        return self._parse_request(url)

    def _userlist(self, username, status, sort_by, limit):
        final_status = "" if status == "all" else f"status={status}&"
        url = f"{self.urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}"
        url = f"{urls['user']}/{username}/animelist?{final_status}sort={sort_by}&limit={limit}"
        return self._parse_request(url)

    def get_items(self, method, data):
        pretty = util.pretty_names[method] if method in util.pretty_names else method
    def _genre(self, genre_id, limit):
        data = self._jiken_request(f"/genre/anime/{genre_id}")
        if "item_count" not in data:
            raise Failed(f"MyAnimeList Error: No MyAnimeList IDs for Genre ID: {genre_id}")
        total_items = data["item_count"]
        if total_items < limit or limit <= 0:
            limit = total_items
        mal_ids = []
        num_of_pages = math.ceil(int(limit) / 100)
        current_page = 1
        chances = 0
        while current_page <= num_of_pages:
            if chances > 6:
                logger.debug(data)
                raise Failed("MyAnimeList Error: Connection Failed")
            start_num = (current_page - 1) * 100 + 1
            util.print_return(f"Parsing Page {current_page}/{num_of_pages} {start_num}-{limit if current_page == num_of_pages else current_page * 100}")
            if current_page > 1:
                data = self._jiken_request(f"/genre/anime/{genre_id}/{current_page}")
            if "anime" in data:
                chances = 0
                mal_ids.extend([anime["mal_id"] for anime in data["anime"]])
                if len(mal_ids) > limit:
                    return mal_ids[:limit]
                current_page += 1
            else:
                chances += 1
        util.print_end()
        return mal_ids

    def _producer(self, producer_id, limit):
        data = self._jiken_request(f"/producer/{producer_id}")
        if "anime" not in data:
            raise Failed(f"MyAnimeList Error: No MyAnimeList IDs for Producer ID: {producer_id}")
        mal_ids = []
        count = 1
        while True:
            if count > 1:
                data = self._jiken_request(f"/producer/{producer_id}/{count}")
            if "anime" not in data:
                break
            mal_ids.extend([anime["mal_id"] for anime in data["anime"]])
            if len(mal_ids) > limit > 0:
                return mal_ids[:limit]
            count += 1
        return mal_ids

    def get_mal_ids(self, method, data):
        if method == "mal_id":
            logger.info(f"Processing MyAnimeList ID: {data}")
            mal_ids = [data]
            logger.info(f"Processing {pretty}: {data}")
        elif method in mal_ranked_name:
            logger.info(f"Processing {mal_ranked_pretty[method]}: {data} Anime")
            mal_ids = self._ranked(mal_ranked_name[method], data)
            logger.info(f"Processing {pretty}: {data} Anime")
        elif method == "mal_genre":
            logger.info(f"Processing {mal_ranked_pretty[method]} ID: {data['genre_id']}")
            mal_ids = self._genre(data["genre_id"], data["limit"])
        elif method == "mal_producer":
            logger.info(f"Processing {mal_ranked_pretty[method]} ID: {data['producer_id']}")
            mal_ids = self._producer(data["producer_id"], data["limit"])
        elif method == "mal_season":
            logger.info(f"Processing MyAnimeList Season: {data['limit']} Anime from {data['season'].title()} {data['year']} sorted by {pretty_names[data['sort_by']]}")
            mal_ids = self._season(data["season"], data["year"], data["sort_by"], data["limit"])
            logger.info(f"Processing {pretty}: {data['limit']} Anime from {util.pretty_seasons[data['season']]} {data['year']} sorted by {pretty_names[data['sort_by']]}")
        elif method == "mal_suggested":
            logger.info(f"Processing MyAnimeList Suggested: {data} Anime")
            mal_ids = self._suggestions(data)
            logger.info(f"Processing {pretty}: {data} Anime")
        elif method == "mal_userlist":
            logger.info(f"Processing MyAnimeList Userlist: {data['limit']} Anime from {self._username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}")
            mal_ids = self._userlist(data["username"], data["status"], data["sort_by"], data["limit"])
            logger.info(f"Processing {pretty}: {data['limit']} Anime from {self._username() if data['username'] == '@me' else data['username']}'s {pretty_names[data['status']]} list sorted by {pretty_names[data['sort_by']]}")
        else:
            raise Failed(f"MyAnimeList Error: Method {method} not supported")
        movie_ids, show_ids = self.config.Convert.myanimelist_to_ids(mal_ids)
        logger.debug("")
        logger.debug(f"{len(mal_ids)} MyAnimeList IDs Found: {mal_ids}")
        logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
        logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
        return movie_ids, show_ids
        return mal_ids
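The new mal_genre builder pages Jikan's genre endpoint 100 results at a time, which is where the math.ceil page count in _genre comes from. A minimal standalone sketch of the same pagination against Jikan v3 as used in the diff (the retry-on-missing-key "chances" logic is trimmed for brevity):

import math, time, requests

JIKAN = "https://api.jikan.moe/v3"

def genre_anime_ids(genre_id, limit):
    # The first page also reports the total item count for the genre.
    data = requests.get(f"{JIKAN}/genre/anime/{genre_id}").json()
    limit = min(limit, data["item_count"]) if limit > 0 else data["item_count"]
    mal_ids = []
    for page in range(1, math.ceil(limit / 100) + 1):
        if page > 1:
            time.sleep(2)  # Jikan is rate limited, so pause between pages
            data = requests.get(f"{JIKAN}/genre/anime/{genre_id}/{page}").json()
        mal_ids.extend(anime["mal_id"] for anime in data.get("anime", []))
    return mal_ids[:limit]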
@@ -1,4 +1,4 @@
import logging, os, re, requests
import logging, os, re
from datetime import datetime
from modules import plex, util
from modules.util import Failed, ImageData

@@ -7,13 +7,14 @@ from ruamel import yaml

logger = logging.getLogger("Plex Meta Manager")

github_base = "https://raw.githubusercontent.com/meisnate12/Plex-Meta-Manager-Configs/master/"

class Metadata:
    def __init__(self, config, library, file_type, path):
        self.config = config
        self.library = library
        self.type = file_type
        self.path = path
        self.github_base = "https://raw.githubusercontent.com/meisnate12/Plex-Meta-Manager-Configs/master/"
        logger.info("")
        logger.info(f"Loading Metadata {file_type}: {path}")
        def get_dict(attribute, attr_data, check_list=None):

@@ -37,8 +38,8 @@ class Metadata:
                return None
        try:
            if file_type in ["URL", "Git"]:
                content_path = path if file_type == "URL" else f"{self.github_base}{path}.yml"
                response = requests.get(content_path)
                content_path = path if file_type == "URL" else f"{github_base}{path}.yml"
                response = self.config.get(content_path)
                if response.status_code >= 400:
                    raise Failed(f"URL Error: No file found at {content_path}")
                content = response.content

@@ -66,7 +67,7 @@ class Metadata:
        else:
            return self.collections

    def update_metadata(self, TMDb, test):
    def update_metadata(self):
        if not self.metadata:
            return None
        logger.info("")

@@ -74,26 +75,28 @@ class Metadata:
        logger.info("")
        for mapping_name, meta in self.metadata.items():
            methods = {mm.lower(): mm for mm in meta}
            if test and ("test" not in methods or meta[methods["test"]] is not True):
            if self.config.test_mode and ("test" not in methods or meta[methods["test"]] is not True):
                continue

            updated = False
            edits = {}
            advance_edits = {}

            def add_edit(name, current, group, alias, key=None, value=None, var_type="str"):
            def add_edit(name, current_item, group, alias, key=None, value=None, var_type="str"):
                if value or name in alias:
                    if value or group[alias[name]]:
                        if key is None: key = name
                        if value is None: value = group[alias[name]]
                        try:
                            current = str(getattr(current_item, key, ""))
                            if var_type == "date":
                                final_value = util.check_date(value, name, return_string=True, plex_date=True)
                                final_value = util.validate_date(value, name, return_as="%Y-%m-%d")
                                current = current[:-9]
                            elif var_type == "float":
                                final_value = util.check_number(value, name, number_type="float", minimum=0, maximum=10)
                                final_value = util.parse(name, value, datatype="float", minimum=0, maximum=10)
                            else:
                                final_value = value
                            if str(current) != str(final_value):
                            if current != str(final_value):
                                edits[f"{key}.value"] = final_value
                                edits[f"{key}.locked"] = 1
                                logger.info(f"Detail: {name} updated to {final_value}")
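The add_edit refactor above stops passing the current value in from every call site and instead reads it off the item with getattr, so each caller shrinks to the item itself. A small sketch of the resulting pattern (the key.value/key.locked convention for Plex edits and the edits dict come straight from the diff; the rest is illustrative):

edits = {}

def add_edit(item, key, new_value):
    # Read the current value straight off the Plex item instead of
    # requiring the caller to pass it in.
    current = str(getattr(item, key, ""))
    if current != str(new_value):
        edits[f"{key}.value"] = new_value
        edits[f"{key}.locked"] = 1  # lock the field so Plex agents don't overwrite it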
@@ -167,7 +170,7 @@ class Metadata:
            logger.info("")
            year = None
            if "year" in methods:
                year = util.check_number(meta[methods["year"]], "year", minimum=1800, maximum=datetime.now().year + 1)
                year = util.parse("year", meta, datatype="int", methods=methods, minimum=1800, maximum=datetime.now().year + 1)

            title = mapping_name
            if "title" in methods:

@@ -209,13 +212,13 @@ class Metadata:
                    logger.error("Metadata Error: tmdb_show attribute is blank")
                else:
                    tmdb_is_movie = False
                    tmdb_item = TMDb.get_show(util.regex_first_int(data, "Show"))
                    tmdb_item = self.config.TMDb.get_show(util.regex_first_int(data, "Show"))
            elif "tmdb_movie" in methods:
                if meta[methods["tmdb_movie"]] is None:
                    logger.error("Metadata Error: tmdb_movie attribute is blank")
                else:
                    tmdb_is_movie = True
                    tmdb_item = TMDb.get_movie(util.regex_first_int(meta[methods["tmdb_movie"]], "Movie"))
                    tmdb_item = self.config.TMDb.get_movie(util.regex_first_int(meta[methods["tmdb_movie"]], "Movie"))
        except Failed as e:
            logger.error(e)

@@ -242,16 +245,16 @@ class Metadata:
                genres = [genre.name for genre in tmdb_item.genres]

            edits = {}
            add_edit("title", item.title, meta, methods, value=title)
            add_edit("sort_title", item.titleSort, meta, methods, key="titleSort")
            add_edit("originally_available", str(item.originallyAvailableAt)[:-9], meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date")
            add_edit("critic_rating", item.rating, meta, methods, value=rating, key="rating", var_type="float")
            add_edit("audience_rating", item.audienceRating, meta, methods, key="audienceRating", var_type="float")
            add_edit("content_rating", item.contentRating, meta, methods, key="contentRating")
            add_edit("original_title", item.originalTitle, meta, methods, key="originalTitle", value=original_title)
            add_edit("studio", item.studio, meta, methods, value=studio)
            add_edit("tagline", item.tagline, meta, methods, value=tagline)
            add_edit("summary", item.summary, meta, methods, value=summary)
            add_edit("title", item, meta, methods, value=title)
            add_edit("sort_title", item, meta, methods, key="titleSort")
            add_edit("originally_available", item, meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date")
            add_edit("critic_rating", item, meta, methods, value=rating, key="rating", var_type="float")
            add_edit("audience_rating", item, meta, methods, key="audienceRating", var_type="float")
            add_edit("content_rating", item, meta, methods, key="contentRating")
            add_edit("original_title", item, meta, methods, key="originalTitle", value=original_title)
            add_edit("studio", item, meta, methods, value=studio)
            add_edit("tagline", item, meta, methods, value=tagline)
            add_edit("summary", item, meta, methods, value=summary)
            if self.library.edit_item(item, mapping_name, item_type, edits):
                updated = True

@@ -306,8 +309,8 @@ class Metadata:
                        logger.error("Metadata Error: sub attribute must be True or False")

                    edits = {}
                    add_edit("title", season.title, season_dict, season_methods, value=title)
                    add_edit("summary", season.summary, season_dict, season_methods)
                    add_edit("title", season, season_dict, season_methods, value=title)
                    add_edit("summary", season, season_dict, season_methods)
                    if self.library.edit_item(season, season_id, "Season", edits):
                        updated = True
                    set_images(season, season_dict, season_methods)

@@ -352,13 +355,11 @@ class Metadata:
                        else:
                            logger.error("Metadata Error: sub attribute must be True or False")
                        edits = {}
                        add_edit("title", episode.title, episode_dict, episode_methods, value=title)
                        add_edit("sort_title", episode.titleSort, episode_dict, episode_methods,
                                 key="titleSort")
                        add_edit("rating", episode.rating, episode_dict, episode_methods)
                        add_edit("originally_available", str(episode.originallyAvailableAt)[:-9],
                                 episode_dict, episode_methods, key="originallyAvailableAt")
                        add_edit("summary", episode.summary, episode_dict, episode_methods)
                        add_edit("title", episode, episode_dict, episode_methods, value=title)
                        add_edit("sort_title", episode, episode_dict, episode_methods, key="titleSort")
                        add_edit("rating", episode, episode_dict, episode_methods, var_type="float")
                        add_edit("originally_available", episode, episode_dict, episode_methods, key="originallyAvailableAt", var_type="date")
                        add_edit("summary", episode, episode_dict, episode_methods)
                        if self.library.edit_item(episode, f"{season_id} Episode: {episode_id}", "Season", edits):
                            updated = True
                        if edit_tags("director", episode, episode_dict, episode_methods):

@@ -366,7 +367,7 @@ class Metadata:
                        if edit_tags("writer", episode, episode_dict, episode_methods):
                            updated = True
                        set_images(episode, episode_dict, episode_methods)
                        logger.info(f"Episode S{episode_id}E{season_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
                        logger.info(f"Episode S{season_id}E{episode_id} of {mapping_name} Details Update {'Complete' if updated else 'Not Needed'}")
                    else:
                        logger.error(f"Metadata Error: episode {episode_str} invalid must have S##E## format")
                else:
@@ -1,10 +1,11 @@
import logging, requests
import logging
from modules import util
from modules.util import Failed
from retrying import retry

logger = logging.getLogger("Plex Meta Manager")

base_url = "http://www.omdbapi.com/"

class OMDbObj:
    def __init__(self, imdb_id, data):
        self._imdb_id = imdb_id

@@ -35,25 +36,23 @@ class OMDbObj:
        self.type = data["Type"]

class OMDb:
    def __init__(self, params, Cache=None):
        self.url = "http://www.omdbapi.com/"
    def __init__(self, config, params):
        self.config = config
        self.apikey = params["apikey"]
        self.limit = False
        self.Cache = Cache
        self.get_omdb("tt0080684")

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
    def get_omdb(self, imdb_id):
        expired = None
        if self.Cache:
            omdb_dict, expired = self.Cache.query_omdb(imdb_id)
        if self.config.Cache:
            omdb_dict, expired = self.config.Cache.query_omdb(imdb_id)
            if omdb_dict and expired is False:
                return OMDbObj(imdb_id, omdb_dict)
        response = requests.get(self.url, params={"i": imdb_id, "apikey": self.apikey})
        response = self.config.get(base_url, params={"i": imdb_id, "apikey": self.apikey})
        if response.status_code < 400:
            omdb = OMDbObj(imdb_id, response.json())
            if self.Cache:
                self.Cache.update_omdb(expired, omdb)
            if self.config.Cache:
                self.config.Cache.update_omdb(expired, omdb)
            return omdb
        else:
            error = response.json()['Error']
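get_omdb above is the usual cache-or-fetch shape: ask the local cache first, hit the API only on a miss or an expired row, then write the result back. A minimal sketch of that shape with a plain dict standing in for the SQLite cache (query_omdb/update_omdb are the real method names in the diff; everything else here is illustrative):

import requests

BASE_URL = "http://www.omdbapi.com/"
cache = {}  # imdb_id -> (data, expired), in place of the real SQLite cache

def get_omdb(imdb_id, apikey):
    data, expired = cache.get(imdb_id, (None, None))
    if data and expired is False:
        return data  # fresh cache hit, no network call
    response = requests.get(BASE_URL, params={"i": imdb_id, "apikey": apikey})
    if response.status_code >= 400:
        raise RuntimeError(response.json()["Error"])
    data = response.json()
    cache[imdb_id] = (data, False)  # refresh the cached row
    return data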
288
modules/plex.py
@@ -1,4 +1,4 @@
import glob, logging, os, plexapi, requests, shutil, time
import logging, os, plexapi, requests, shutil, time
from modules import builder, util
from modules.meta import Metadata
from modules.util import Failed, ImageData

@@ -33,17 +33,15 @@ search_translation = {
    "episode_user_rating": "episode.userRating",
    "episode_plays": "episode.viewCount"
}
show_translation = {
    "hdr": "episode.hdr",
    "audioLanguage": "episode.audioLanguage",
    "subtitleLanguage": "episode.subtitleLanguage",
    "resolution": "episode.resolution"
}
modifier_translation = {
    "": "",
    ".not": "!",
    ".gt": "%3E%3E",
    ".gte": "%3E",
    ".lt": "%3C%3C",
    ".lte": "%3C",
    ".before": "%3C%3C",
    ".after": "%3E%3E",
    ".begins": "%3C",
    ".ends": "%3E"
    "": "", ".not": "!", ".gt": "%3E%3E", ".gte": "%3E", ".lt": "%3C%3C", ".lte": "%3C",
    ".before": "%3C%3C", ".after": "%3E%3E", ".begins": "%3C", ".ends": "%3E"
}
episode_sorting_options = {"default": "-1", "oldest": "0", "newest": "1"}
keep_episodes_options = {"all": 0, "5_latest": 5, "3_latest": 3, "latest": 1, "past_3": -3, "past_7": -7, "past_30": -30}

@@ -57,6 +55,12 @@ plex_languages = ["default", "ar-SA", "ca-ES", "cs-CZ", "da-DK", "de-DE", "el-GR
metadata_language_options = {lang.lower(): lang for lang in plex_languages}
metadata_language_options["default"] = None
use_original_title_options = {"default": -1, "no": 0, "yes": 1}
collection_mode_options = {
    "default": "default", "hide": "hide",
    "hide_items": "hideItems", "hideitems": "hideItems",
    "show_items": "showItems", "showitems": "showItems"
}
collection_order_options = ["release", "alpha", "custom"]
collection_mode_keys = {-1: "default", 0: "hide", 1: "hideItems", 2: "showItems"}
collection_order_keys = {0: "release", 1: "alpha", 2: "custom"}
item_advance_keys = {

@@ -114,13 +118,9 @@ or_searches = [
    "writer", "decade", "resolution", "year", "episode_title", "episode_year"
]
movie_only_searches = [
    "country", "country.not",
    "director", "director.not",
    "producer", "producer.not",
    "writer", "writer.not",
    "country", "country.not", "director", "director.not", "producer", "producer.not", "writer", "writer.not",
    "decade", "duplicate", "unplayed", "progress", "trash",
    "plays.gt", "plays.gte", "plays.lt", "plays.lte",
    "duration.gt", "duration.gte", "duration.lt", "duration.lte"
    "plays.gt", "plays.gte", "plays.lt", "plays.lte", "duration.gt", "duration.gte", "duration.lt", "duration.lte"
]
show_only_searches = [
    "network", "network.not",

@@ -132,21 +132,15 @@ show_only_searches = [
    "episode_user_rating.gt", "episode_user_rating.gte", "episode_user_rating.lt", "episode_user_rating.lte",
    "episode_year", "episode_year.not", "episode_year.gt", "episode_year.gte", "episode_year.lt", "episode_year.lte"
]
number_attributes = ["plays", "episode_plays", "added", "episode_added", "release", "episode_air_date", "duration", "tmdb_vote_count"]
float_attributes = ["user_rating", "episode_user_rating", "critic_rating", "audience_rating"]
boolean_attributes = [
    "hdr", "unmatched", "duplicate", "unplayed", "progress", "trash",
    "unplayed_episodes", "episode_unplayed", "episode_duplicate", "episode_progress", "episode_unmatched",
]
tmdb_attributes = ["actor", "director", "producer", "writer"]
date_attributes = ["added", "episode_added", "release", "episode_air_date", "last_played", "episode_last_played"]
search_display = {
    "added": "Date Added",
    "release": "Release Date",
    "hdr": "HDR",
    "progress": "In Progress",
    "episode_progress": "Episode In Progress"
}
date_attributes = ["added", "episode_added", "release", "episode_air_date", "last_played", "episode_last_played", "first_episode_aired", "last_episode_aired"]
number_attributes = ["plays", "episode_plays", "duration", "tmdb_vote_count"] + date_attributes
search_display = {"added": "Date Added", "release": "Release Date", "hdr": "HDR", "progress": "In Progress", "episode_progress": "Episode In Progress"}
sorts = {
    None: None,
    "title.asc": "titleSort:asc", "title.desc": "titleSort:desc",

@@ -157,44 +151,10 @@ sorts = {
    "duration.asc": "duration:asc", "duration.desc": "duration:desc",
    "added.asc": "addedAt:asc", "added.desc": "addedAt:desc"
}
modifiers = {
    ".not": "!",
    ".begins": "<",
    ".ends": ">",
    ".before": "<<",
    ".after": ">>",
    ".gt": ">>",
    ".gte": "__gte",
    ".lt": "<<",
    ".lte": "__lte"
}
mod_displays = {
    "": "is",
    ".not": "is not",
    ".begins": "begins with",
    ".ends": "ends with",
    ".before": "is before",
    ".after": "is after",
    ".gt": "is greater than",
    ".gte": "is greater than or equal",
    ".lt": "is less than",
    ".lte": "is less than or equal"
}
modifiers = {".not": "!", ".begins": "<", ".ends": ">", ".before": "<<", ".after": ">>", ".gt": ">>", ".gte": "__gte", ".lt": "<<", ".lte": "__lte"}
tags = [
    "actor",
    "audio_language",
    "collection",
    "content_rating",
    "country",
    "director",
    "genre",
    "label",
    "network",
    "producer",
    "resolution",
    "studio",
    "subtitle_language",
    "writer"
    "actor", "audio_language", "collection", "content_rating", "country", "director", "genre", "label",
    "network", "producer", "resolution", "studio", "subtitle_language", "writer"
]
movie_sorts = {
    "title.asc": "titleSort", "title.desc": "titleSort%3Adesc",

@@ -245,18 +205,17 @@ episode_sorts = {
    "added.asc": "addedAt", "added.desc": "addedAt%3Adesc",
    "random": "random"
}
sort_types = {
    "movies": (1, movie_sorts),
    "shows": (2, show_sorts),
    "seasons": (3, season_sorts),
    "episodes": (4, episode_sorts),
}
sort_types = {"movies": (1, movie_sorts), "shows": (2, show_sorts), "seasons": (3, season_sorts), "episodes": (4, episode_sorts)}

class Plex:
    def __init__(self, config, params):
        self.config = config
        self.plex = params["plex"]
        self.url = params["plex"]["url"]
        self.token = params["plex"]["token"]
        self.timeout = params["plex"]["timeout"]
        try:
            self.PlexServer = PlexServer(params["plex"]["url"], params["plex"]["token"], timeout=params["plex"]["timeout"])
            self.PlexServer = PlexServer(baseurl=self.url, token=self.token, session=self.config.session, timeout=self.timeout)
        except Unauthorized:
            raise Failed("Plex Error: Plex token is invalid")
        except ValueError as e:
@@ -277,7 +236,20 @@ class Plex:
        self.metadatas = []

        self.metadata_files = []
        metadata = []
        for file_type, metadata_file in params["metadata_path"]:
            if file_type == "Folder":
                if os.path.isdir(metadata_file):
                    yml_files = util.glob_filter(os.path.join(metadata_file, "*.yml"))
                    if yml_files:
                        metadata.extend([("File", yml) for yml in yml_files])
                    else:
                        logger.error(f"Config Error: No YAML (.yml) files found in {metadata_file}")
                else:
                    logger.error(f"Config Error: Folder not found: {metadata_file}")
            else:
                metadata.append((file_type, metadata_file))
        for file_type, metadata_file in metadata:
            try:
                meta_obj = Metadata(config, self, file_type, metadata_file)
                if meta_obj.collections:

@@ -305,6 +277,7 @@ class Plex:
        self.mapping_name, output = util.validate_filename(self.original_mapping_name)
        if output:
            logger.info(output)
        self.image_table_name = self.config.Cache.get_image_table_name(self.original_mapping_name) if self.config.Cache else None
        self.missing_path = os.path.join(params["default_dir"], f"{self.name}_missing.yml")
        self.metadata_path = params["metadata_path"]
        self.asset_directory = params["asset_directory"]

@@ -315,26 +288,28 @@ class Plex:
        self.show_filtered = params["show_filtered"]
        self.show_missing = params["show_missing"]
        self.save_missing = params["save_missing"]
        self.missing_only_released = params["missing_only_released"]
        self.create_asset_folders = params["create_asset_folders"]
        self.mass_genre_update = params["mass_genre_update"]
        self.mass_audience_rating_update = params["mass_audience_rating_update"]
        self.mass_critic_rating_update = params["mass_critic_rating_update"]
        self.mass_trakt_rating_update = params["mass_trakt_rating_update"]
        self.split_duplicates = params["split_duplicates"]
        self.radarr_add_all = params["radarr_add_all"]
        self.sonarr_add_all = params["sonarr_add_all"]
        self.mass_update = self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update or self.split_duplicates or self.radarr_add_all or self.sonarr_add_all
        self.plex = params["plex"]
        self.url = params["plex"]["url"]
        self.token = params["plex"]["token"]
        self.timeout = params["plex"]["timeout"]
        self.mass_update = self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update \
                           or self.mass_trakt_rating_update or self.split_duplicates or self.radarr_add_all or self.sonarr_add_all
        self.clean_bundles = params["plex"]["clean_bundles"]
        self.empty_trash = params["plex"]["empty_trash"]
        self.optimize = params["plex"]["optimize"]
        self.missing = {}
        self.movie_map = {}
        self.show_map = {}
        self.imdb_map = {}
        self.movie_rating_key_map = {}
        self.show_rating_key_map = {}
        self.run_again = []
        self.run_sort = []
        self.overlays = []

    def get_all_collections(self):

@@ -400,11 +375,15 @@ class Plex:

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def reload(self, item):
        item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False,
                    includeChildren=False, includeConcerts=False, includeExternalMedia=False, includeExtras=False,
                    includeFields=False, includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False,
                    includeOnDeck=False, includePopularLeaves=False, includeRelated=False,
                    includeRelatedCount=0, includeReviews=False, includeStations=False)
        try:
            item.reload(checkFiles=False, includeAllConcerts=False, includeBandwidths=False, includeChapters=False,
                        includeChildren=False, includeConcerts=False, includeExternalMedia=False, includeExtras=False,
                        includeFields=False, includeGeolocation=False, includeLoudnessRamps=False, includeMarkers=False,
                        includeOnDeck=False, includePopularLeaves=False, includeRelated=False,
                        includeRelatedCount=0, includeReviews=False, includeStations=False)
        except (BadRequest, NotFound) as e:
            util.print_stacktrace()
            raise Failed(f"Item Failed to Load: {e}")

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def edit_query(self, item, edits, advanced=False):
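The reload change above matters because retrying's retry_on_exception predicate decides whether another attempt happens: returning True retries, returning False re-raises immediately. Wrapping Plex's BadRequest/NotFound in Failed lets a predicate bail out on permanent errors instead of hammering the server six times. A minimal sketch of that interaction (the predicate name mirrors util.retry_if_not_failed from the diff; its body is an assumption):

from plexapi.exceptions import BadRequest, NotFound
from retrying import retry

class Failed(Exception):
    pass

def retry_if_not_failed(exception):
    # Retry transient errors; give up immediately once the error has been
    # classified as permanent by raising Failed.
    return not isinstance(exception, Failed)

@retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=retry_if_not_failed)
def reload(item):
    try:
        item.reload(checkFiles=False)
    except (BadRequest, NotFound) as e:
        raise Failed(f"Item Failed to Load: {e}")  # permanent: do not retry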
@@ -416,6 +395,10 @@ class Plex:

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def _upload_image(self, item, image):
        logger.debug(item)
        logger.debug(image.is_poster)
        logger.debug(image.is_url)
        logger.debug(image.location)
        if image.is_poster and image.is_url:
            item.uploadPoster(url=image.location)
        elif image.is_poster:

@@ -427,19 +410,23 @@ class Plex:
        self.reload(item)

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def _upload_file_poster(self, item, image):
    def upload_file_poster(self, item, image):
        logger.debug(item)
        logger.debug(image)
        item.uploadPoster(filepath=image)
        self.reload(item)

    def upload_images(self, item, poster=None, background=None, overlay=None):
        image = None
        image_compare = None
        poster_uploaded = False
        if self.config.Cache:
            image, image_compare = self.config.Cache.query_image_map(item.ratingKey, self.image_table_name)

        if poster is not None:
            try:
                image = None
                if self.config.Cache:
                    image, image_compare, _ = self.config.Cache.query_image_map(item.ratingKey, self.original_mapping_name, "poster")
                    if str(poster.compare) != str(image_compare):
                        image = None
                if image_compare and str(poster.compare) != str(image_compare):
                    image = None
                if image is None or image != item.thumb:
                    self._upload_image(item, poster)
                    poster_uploaded = True

@@ -450,13 +437,15 @@ class Plex:
                util.print_stacktrace()
                logger.error(f"Detail: {poster.attribute} failed to update {poster.message}")

        overlay_name = ""
        if overlay is not None:
            overlay_name, overlay_folder, overlay_image, temp_image = overlay
            image_overlay = None
            if self.config.Cache:
                image, _, image_overlay = self.config.Cache.query_image_map(item.ratingKey, self.original_mapping_name, "poster")
            if poster_uploaded or not image_overlay or image_overlay != overlay_name:
            item_labels = {item_tag.tag.lower(): item_tag.tag for item_tag in item.labels}
            for item_label in item_labels:
                if item_label.endswith(" overlay") and item_label != f"{overlay_name.lower()} overlay":
                    raise Failed(f"Overlay Error: Poster already has an existing Overlay: {item_labels[item_label]}")
            if poster_uploaded or image is None or image != item.thumb or f"{overlay_name.lower()} overlay" not in item_labels:
                if not item.posterUrl:
                    raise Failed(f"Overlay Error: No existing poster to Overlay for {item.title}")
                response = requests.get(item.posterUrl)
                if response.status_code >= 400:
                    raise Failed(f"Overlay Error: Overlay Failed for {item.title}")

@@ -466,11 +455,12 @@ class Plex:
                shutil.copyfile(temp_image, os.path.join(overlay_folder, f"{item.ratingKey}.png"))
                while util.is_locked(temp_image):
                    time.sleep(1)
                new_poster = Image.open(temp_image)
                new_poster = Image.open(temp_image).convert("RGBA")
                new_poster = new_poster.resize(overlay_image.size, Image.ANTIALIAS)
                new_poster.paste(overlay_image, (0, 0), overlay_image)
                new_poster.save(temp_image)
                self._upload_file_poster(item, temp_image)
                self.upload_file_poster(item, temp_image)
                self.edit_tags("label", item, add_tags=[f"{overlay_name} Overlay"])
                poster_uploaded = True
                logger.info(f"Detail: Overlay: {overlay_name} applied to {item.title}")

@@ -479,7 +469,7 @@ class Plex:
            try:
                image = None
                if self.config.Cache:
                    image, image_compare, _ = self.config.Cache.query_image_map(item.ratingKey, self.original_mapping_name, "background")
                    image, image_compare = self.config.Cache.query_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds")
                    if str(background.compare) != str(image_compare):
                        image = None
                if image is None or image != item.art:

@@ -494,15 +484,14 @@ class Plex:

        if self.config.Cache:
            if poster_uploaded:
                self.config.Cache.update_image_map(item.ratingKey, self.original_mapping_name, "poster", item.thumb, poster.compare if poster else "", overlay_name)
                self.config.Cache.update_image_map(item.ratingKey, self.image_table_name, item.thumb, poster.compare if poster else "")
            if background_uploaded:
                self.config.Cache.update_image_map(item.ratingKey, self.original_mapping_name, "background", item.art, background.compare, "")
                self.config.Cache.update_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds", item.art, background.compare)

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
    def get_search_choices(self, search_name, title=True):
        final_search = search_translation[search_name] if search_name in search_translation else search_name
        if final_search == "resolution" and self.is_show:
            final_search = "episode.resolution"
        final_search = show_translation[final_search] if self.is_show and final_search in show_translation else final_search
        try:
            choices = {}
            for choice in self.Plex.listFilterChoices(final_search):

@@ -510,7 +499,8 @@ class Plex:
                choices[choice.key.lower()] = choice.title if title else choice.key
            return choices
        except NotFound:
            raise Failed(f"Collection Error: plex search attribute: {search_name} only supported with Plex's New TV Agent")
            logger.debug(f"Search Attribute: {final_search}")
            raise Failed(f"Collection Error: plex search attribute: {search_name} not supported")

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def get_labels(self):

@@ -523,6 +513,12 @@ class Plex:
        else: method = None
        return self.Plex._server.query(key, method=method)

    def move_item(self, collection, item, after=None):
        key = f"{collection.key}/items/{item}/move"
        if after:
            key += f"?after={after}"
        self._query(key, put=True)

    def smart_label_url(self, title, sort):
        labels = self.get_labels()
        if title not in labels:

@@ -586,13 +582,13 @@ class Plex:

    def get_collection(self, data):
        if isinstance(data, int):
            collection = self.fetchItem(data)
            return self.fetchItem(data)
        elif isinstance(data, Collection):
            collection = data
            return data
        else:
            collection = util.choose_from_list(self.search(title=str(data), libtype="collection"), "collection", str(data), exact=True)
        if collection:
            return collection
            for d in self.search(title=str(data), libtype="collection"):
                if d.title == data:
                    return d
        raise Failed(f"Plex Error: Collection {data} not found")

    def validate_collections(self, collections):

@@ -604,18 +600,18 @@ class Plex:
            raise Failed(f"Collection Error: No valid Plex Collections in {collections}")
        return valid_collections

    def get_items(self, method, data):
        pretty = util.pretty_names[method] if method in util.pretty_names else method
    def get_rating_keys(self, method, data):
        media_type = "Movie" if self.is_movie else "Show"
        items = []
        if method == "plex_all":
            logger.info(f"Processing {pretty} {media_type}s")
            logger.info(f"Processing Plex All {media_type}s")
            items = self.get_all()
        elif method == "plex_search":
            util.print_multiline(data[1], info=True)
            items = self.get_filter_items(data[2])
        elif method == "plex_collectionless":
            good_collections = []
            logger.info(f"Processing Plex Collectionless")
            logger.info("Collections Excluded")
            for col in self.get_all_collections():
                keep_collection = True

@@ -653,7 +649,10 @@ class Plex:
        else:
            raise Failed(f"Plex Error: Method {method} not supported")
        if len(items) > 0:
            return [item.ratingKey for item in items]
            ids = [item.ratingKey for item in items]
            logger.debug("")
            logger.debug(f"{len(ids)} Keys Found: {ids}")
            return ids
        else:
            raise Failed("Plex Error: No Items found in Plex")

@@ -698,26 +697,19 @@ class Plex:
        for i, item in enumerate(items, 1):
            util.print_return(f"Processing: {i}/{len(items)} {item.title}")
            if item.ratingKey not in self.movie_rating_key_map and item.ratingKey not in self.show_rating_key_map:
                id_type, main_id = self.config.Convert.get_id(item, self)
                id_type, main_id, imdb_id = self.config.Convert.get_id(item, self)
                if main_id:
                    if not isinstance(main_id, list):
                        main_id = [main_id]
                    if id_type == "movie":
                        self.movie_rating_key_map[item.ratingKey] = main_id[0]
                        for m in main_id:
                            if m in self.movie_map:
                                self.movie_map[m].append(item.ratingKey)
                            else:
                                self.movie_map[m] = [item.ratingKey]
                        util.add_dict_list(main_id, item.ratingKey, self.movie_map)
                    elif id_type == "show":
                        self.show_rating_key_map[item.ratingKey] = main_id[0]
                        for m in main_id:
                            if m in self.show_map:
                                self.show_map[m].append(item.ratingKey)
                            else:
                                self.show_map[m] = [item.ratingKey]
                        util.add_dict_list(main_id, item.ratingKey, self.show_map)
                if imdb_id:
                    util.add_dict_list(imdb_id, item.ratingKey, self.imdb_map)
        logger.info("")
        logger.info(util.adjust_space(f"Processed {len(items)} {'Movies' if self.is_movie else 'Shows'}"))
        return items

    def get_tmdb_from_map(self, item):
        return self.movie_rating_key_map[item.ratingKey] if item.ratingKey in self.movie_rating_key_map else None
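The map_guids rewrite above collapses the repeated if-in-dict-append-else-create blocks into a util.add_dict_list helper. A minimal sketch of what such a helper plausibly looks like (the helper name comes from the diff; the body is an assumption based on how it is called):

def add_dict_list(keys, value, dict_map):
    # Append value under every key, creating the list on first sight of a key.
    for key in keys if isinstance(keys, list) else [keys]:
        if key in dict_map:
            dict_map[key].append(value)
        else:
            dict_map[key] = [value]

movie_map = {}
add_dict_list([123, 456], "ratingKey-1", movie_map)
add_dict_list([123], "ratingKey-2", movie_map)
# movie_map == {123: ["ratingKey-1", "ratingKey-2"], 456: ["ratingKey-1"]}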
@@ -729,7 +721,10 @@ class Plex:
        kwargs = {}
        if year is not None:
            kwargs["year"] = year
        return util.choose_from_list(self.search(title=str(data), **kwargs), "movie" if self.is_movie else "show", str(data), exact=True)
        for d in self.search(title=str(data), **kwargs):
            if d.title == data:
                return d
        return None

    def edit_item(self, item, name, item_type, edits, advanced=False):
        if len(edits) > 0:

@@ -750,8 +745,8 @@ class Plex:
        key = builder.filter_translation[attr] if attr in builder.filter_translation else attr
        if add_tags or remove_tags or sync_tags:
            _add_tags = add_tags if add_tags else []
            _remove_tags = remove_tags if remove_tags else []
            _sync_tags = sync_tags if sync_tags else []
            _remove_tags = [t.lower() for t in remove_tags] if remove_tags else []
            _sync_tags = [t.lower() for t in sync_tags] if sync_tags else []
            try:
                _item_tags = [item_tag.tag.lower() for item_tag in getattr(obj, key)]
            except BadRequest:

@@ -761,39 +756,40 @@ class Plex:
            if _add:
                updated = True
                self.query_data(getattr(obj, f"add{attr.capitalize()}"), _add)
                logger.info(f"Detail: {attr.capitalize()} {_add} added")
                logger.info(f"Detail: {attr.capitalize()} {','.join(_add)} added to {obj.title}")
            if _remove:
                updated = True
                self.query_data(getattr(obj, f"remove{attr.capitalize()}"), _remove)
                logger.info(f"Detail: {attr.capitalize()} {_remove} removed")
                logger.info(f"Detail: {attr.capitalize()} {','.join(_remove)} removed from {obj.title}")
        return updated

    def update_item_from_assets(self, item, overlay=None):
        name = os.path.basename(os.path.dirname(item.locations[0]) if self.is_movie else item.locations[0])
        found_one = False
    def update_item_from_assets(self, item, overlay=None, create=False):
        name = os.path.basename(os.path.dirname(str(item.locations[0])) if self.is_movie else str(item.locations[0]))
        logger.debug(name)
        found_folder = False
        poster = None
        background = None
        for ad in self.asset_directory:
            poster = None
            background = None
            item_dir = None
            if self.asset_folders:
                if os.path.isdir(os.path.join(ad, name)):
                    item_dir = os.path.join(ad, name)
                else:
                    matches = glob.glob(os.path.join(ad, "*", name))
                    matches = util.glob_filter(os.path.join(ad, "*", name))
                    if len(matches) > 0:
                        item_dir = os.path.abspath(matches[0])
                if item_dir is None:
                    continue
                found_one = True
                found_folder = True
                poster_filter = os.path.join(item_dir, "poster.*")
                background_filter = os.path.join(item_dir, "background.*")
            else:
                poster_filter = os.path.join(ad, f"{name}.*")
                background_filter = os.path.join(ad, f"{name}_background.*")
            matches = glob.glob(poster_filter)
            matches = util.glob_filter(poster_filter)
            if len(matches) > 0:
                poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_url=False)
            matches = glob.glob(background_filter)
            matches = util.glob_filter(background_filter)
            if len(matches) > 0:
                background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_poster=False, is_url=False)
            if poster or background:
|
|||
season_filter = os.path.join(item_dir, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*")
|
||||
else:
|
||||
season_filter = os.path.join(ad, f"{name}_Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*")
|
||||
matches = glob.glob(season_filter)
|
||||
matches = util.glob_filter(season_filter)
|
||||
if len(matches) > 0:
|
||||
season_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Season {season.seasonNumber}'s ", is_url=False)
|
||||
self.upload_images(season, poster=season_poster)
|
||||
|
@ -813,16 +809,21 @@ class Plex:
|
|||
episode_filter = os.path.join(item_dir, f"{episode.seasonEpisode.upper()}.*")
|
||||
else:
|
||||
episode_filter = os.path.join(ad, f"{name}_{episode.seasonEpisode.upper()}.*")
|
||||
matches = glob.glob(episode_filter)
|
||||
matches = util.glob_filter(episode_filter)
|
||||
if len(matches) > 0:
|
||||
episode_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} {episode.seasonEpisode.upper()}'s ", is_url=False)
|
||||
self.upload_images(episode, poster=episode_poster)
|
||||
if not found_one and overlay:
|
||||
if not poster and overlay:
|
||||
self.upload_images(item, overlay=overlay)
|
||||
elif not found_one:
|
||||
if create and self.asset_folders and not found_folder:
|
||||
os.makedirs(os.path.join(self.asset_directory[0], name), exist_ok=True)
|
||||
logger.info(f"Asset Directory Created: {os.path.join(self.asset_directory[0], name)}")
|
||||
elif not overlay and self.asset_folders and not found_folder:
|
||||
logger.error(f"Asset Warning: No asset folder found called '{name}'")
|
||||
elif not poster and not background:
|
||||
logger.error(f"Asset Warning: No poster or background found in an assets folder for '{name}'")
|
||||
|
||||
def find_collection_assets(self, item, name=None):
|
||||
def find_collection_assets(self, item, name=None, create=False):
|
||||
if name is None:
|
||||
name = item.title
|
||||
for ad in self.asset_directory:
|
||||
|
@ -836,12 +837,15 @@ class Plex:
|
|||
else:
|
||||
poster_filter = os.path.join(ad, f"{name}.*")
|
||||
background_filter = os.path.join(ad, f"{name}_background.*")
|
||||
matches = glob.glob(poster_filter)
|
||||
matches = util.glob_filter(poster_filter)
|
||||
if len(matches) > 0:
|
||||
poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_url=False)
|
||||
matches = glob.glob(background_filter)
|
||||
matches = util.glob_filter(background_filter)
|
||||
if len(matches) > 0:
|
||||
background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_poster=False, is_url=False)
|
||||
if poster or background:
|
||||
return poster, background
|
||||
if create and self.asset_folders and not os.path.isdir(os.path.join(self.asset_directory[0], name)):
|
||||
os.makedirs(os.path.join(self.asset_directory[0], name), exist_ok=True)
|
||||
logger.info(f"Asset Directory Created: {os.path.join(self.asset_directory[0], name)}")
|
||||
return None, None
|
||||
|
|
|
@@ -6,27 +6,21 @@ from arrapi.exceptions import ArrException, Invalid

logger = logging.getLogger("Plex Meta Manager")

availability_translation = {
    "announced": "announced",
    "cinemas": "inCinemas",
    "released": "released",
    "db": "preDB"
}
apply_tags_translation = {
    "": "add",
    "sync": "replace",
    "remove": "remove"
}
availability_translation = {"announced": "announced", "cinemas": "inCinemas", "released": "released", "db": "preDB"}
apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"}
availability_descriptions = {"announced": "For Announced", "cinemas": "For In Cinemas", "released": "For Released", "db": "For PreDB"}

class Radarr:
    def __init__(self, params):
    def __init__(self, config, params):
        self.config = config
        self.url = params["url"]
        self.token = params["token"]
        try:
            self.api = RadarrAPI(self.url, self.token)
            self.api = RadarrAPI(self.url, self.token, session=self.config.session)
        except ArrException as e:
            raise Failed(e)
        self.add = params["add"]
        self.add_existing = params["add_existing"]
        self.root_folder_path = params["root_folder_path"]
        self.monitor = params["monitor"]
        self.availability = params["availability"]

@@ -83,4 +77,3 @@ class Radarr:
        logger.info("")
        for tmdb_id in not_exists:
            logger.info(f"TMDb ID Not in Radarr | {tmdb_id}")
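A recurring thread in this commit is threading one shared requests.Session (config.session) into every HTTP client: PlexServer, RadarrAPI, SonarrAPI, and the module-level get/get_json/get_html helpers. A minimal sketch of why that helps (one connection pool, one place to set default headers); the session keyword arguments are the ones shown in the diff, while the URLs and tokens are placeholders:

import requests
from arrapi import RadarrAPI, SonarrAPI

# One pooled session reused by every HTTP client in the run.
session = requests.Session()
session.headers.update({"User-Agent": "Plex-Meta-Manager"})

radarr = RadarrAPI("http://localhost:7878", "RADARR_TOKEN", session=session)
sonarr = SonarrAPI("http://localhost:8989", "SONARR_TOKEN", session=session)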
|
modules/sonarr.py
@@ -8,30 +8,37 @@ logger = logging.getLogger("Plex Meta Manager")
 series_type = ["standard", "daily", "anime"]
 monitor_translation = {
-    "all": "all",
-    "future": "future",
-    "missing": "missing",
-    "existing": "existing",
-    "pilot": "pilot",
-    "first": "firstSeason",
-    "latest": "latestSeason",
-    "none": "none"
+    "all": "all", "future": "future", "missing": "missing", "existing": "existing",
+    "pilot": "pilot", "first": "firstSeason", "latest": "latestSeason", "none": "none"
 }
-apply_tags_translation = {
-    "": "add",
-    "sync": "replace",
-    "remove": "remove"
+series_type_descriptions = {
+    "standard": "Episodes released with SxxEyy pattern",
+    "daily": "Episodes released daily or less frequently that use year-month-day (2017-05-25)",
+    "anime": "Episodes released using an absolute episode number"
 }
+monitor_descriptions = {
+    "all": "Monitor all episodes except specials",
+    "future": "Monitor episodes that have not aired yet",
+    "missing": "Monitor episodes that do not have files or have not aired yet",
+    "existing": "Monitor episodes that have files or have not aired yet",
+    "pilot": "Monitor the first episode. All other episodes will be ignored",
+    "first": "Monitor all episodes of the first season. All other seasons will be ignored",
+    "latest": "Monitor all episodes of the latest season and future seasons",
+    "none": "No episodes will be monitored"
+}
+apply_tags_translation = {"": "add", "sync": "replace", "remove": "remove"}
 
 class Sonarr:
-    def __init__(self, params):
+    def __init__(self, config, params):
+        self.config = config
         self.url = params["url"]
         self.token = params["token"]
         try:
-            self.api = SonarrAPI(self.url, self.token)
+            self.api = SonarrAPI(self.url, self.token, session=self.config.session)
         except ArrException as e:
             raise Failed(e)
         self.add = params["add"]
         self.add_existing = params["add_existing"]
         self.root_folder_path = params["root_folder_path"]
         self.monitor = params["monitor"]
         self.quality_profile = params["quality_profile"]
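series_type_descriptions and monitor_descriptions mirror Sonarr's own help text, letting the run log explain what a configured value will actually do. An illustrative lookup (not the committed call site):

monitor = "missing"
print(f"Sonarr Monitor: {monitor} ({monitor_descriptions[monitor]})")
# Sonarr Monitor: missing (Monitor episodes that do not have files or have not aired yet)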
18 modules/stevenlu.py Normal file
@@ -0,0 +1,18 @@
+import logging
+from modules.util import Failed
+
+logger = logging.getLogger("Plex Meta Manager")
+
+builders = ["stevenlu_popular"]
+base_url = "https://s3.amazonaws.com/popular-movies/movies.json"
+
+class StevenLu:
+    def __init__(self, config):
+        self.config = config
+
+    def get_stevenlu_ids(self, method):
+        if method == "stevenlu_popular":
+            logger.info(f"Processing StevenLu Popular Movies")
+            return [(i["imdb_id"], "imdb") for i in self.config.get_json(base_url)]
+        else:
+            raise Failed(f"StevenLu Error: Method {method} not supported")
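The new StevenLu module is the simplest builder in the codebase: one public JSON document, already keyed by IMDb ID, mapped straight into the (id, id_type) tuples the rest of the pipeline consumes. A standalone equivalent of what get_stevenlu_ids returns, using plain requests instead of the shared config helper:

import requests

movies = requests.get("https://s3.amazonaws.com/popular-movies/movies.json").json()
ids = [(movie["imdb_id"], "imdb") for movie in movies]
print(ids[:3])  # values depend on the live list, e.g. [('tt...', 'imdb'), ...]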
modules/tautulli.py
@@ -1,15 +1,15 @@
-import logging, requests
+import logging
 from modules import util
 from modules.util import Failed
 from plexapi.exceptions import BadRequest, NotFound
-from retrying import retry
 
 logger = logging.getLogger("Plex Meta Manager")
 
 builders = ["tautulli_popular", "tautulli_watched"]
 
 class Tautulli:
-    def __init__(self, params):
+    def __init__(self, config, params):
+        self.config = config
         self.url = params["url"]
         self.apikey = params["apikey"]
         try:
@@ -20,7 +20,7 @@ class Tautulli:
         if response["response"]["result"] != "success":
             raise Failed(f"Tautulli Error: {response['response']['message']}")
 
-    def get_items(self, library, params):
+    def get_rating_keys(self, library, params):
         query_size = int(params["list_size"]) + int(params["list_buffer"])
         logger.info(f"Processing Tautulli Most {params['list_type'].capitalize()}: {params['list_size']} {'Movies' if library.is_movie else 'Shows'}")
         response = self._request(f"{self.url}/api/v2?apikey={self.apikey}&cmd=get_home_stats&time_range={params['list_days']}&stats_count={query_size}")
@@ -50,6 +50,8 @@ class Tautulli:
                 logger.error(f"Plex Error: Item {item} not found")
                 continue
             count += 1
+        logger.debug("")
+        logger.debug(f"{len(rating_keys)} Keys Found: {rating_keys}")
         return rating_keys
 
     def _section_id(self, library_name):
@@ -62,7 +64,6 @@ class Tautulli:
         if section_id: return section_id
         else: raise Failed(f"Tautulli Error: No Library named {library_name} in the response")
 
-    @retry(stop_max_attempt_number=6, wait_fixed=10000)
     def _request(self, url):
         logger.debug(f"Tautulli URL: {url.replace(self.apikey, '###############')}")
-        return requests.get(url).json()
+        return self.config.get_json(url)
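get_rating_keys deliberately asks Tautulli for more rows than requested (list_size + list_buffer) because some home-stats rows may fail to resolve to Plex items. The raw API call it wraps looks roughly like this (host, API key, and counts are placeholders):

import requests

url = ("http://tautulli:8181/api/v2?apikey=<apikey>"
       "&cmd=get_home_stats&time_range=30&stats_count=25")
response = requests.get(url).json()
if response["response"]["result"] != "success":
    raise RuntimeError(response["response"]["message"])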
244 modules/tmdb.py
@@ -1,5 +1,4 @@
 import logging, tmdbv3api
-from datetime import datetime
 from modules import util
 from modules.util import Failed
 from retrying import retry
@@ -8,110 +7,53 @@ from tmdbv3api.exceptions import TMDbException
 logger = logging.getLogger("Plex Meta Manager")
 
 builders = [
-    "tmdb_actor",
-    "tmdb_actor_details",
-    "tmdb_collection",
-    "tmdb_collection_details",
-    "tmdb_company",
-    "tmdb_crew",
-    "tmdb_crew_details",
-    "tmdb_director",
-    "tmdb_director_details",
-    "tmdb_discover",
-    "tmdb_keyword",
-    "tmdb_list",
-    "tmdb_list_details",
-    "tmdb_movie",
-    "tmdb_movie_details",
-    "tmdb_network",
-    "tmdb_now_playing",
-    "tmdb_popular",
-    "tmdb_producer",
-    "tmdb_producer_details",
-    "tmdb_show",
-    "tmdb_show_details",
-    "tmdb_top_rated",
-    "tmdb_trending_daily",
-    "tmdb_trending_weekly",
-    "tmdb_writer",
-    "tmdb_writer_details"
+    "tmdb_actor", "tmdb_actor_details", "tmdb_collection", "tmdb_collection_details", "tmdb_company",
+    "tmdb_crew", "tmdb_crew_details", "tmdb_director", "tmdb_director_details", "tmdb_discover",
+    "tmdb_keyword", "tmdb_list", "tmdb_list_details", "tmdb_movie", "tmdb_movie_details", "tmdb_network",
+    "tmdb_now_playing", "tmdb_popular", "tmdb_producer", "tmdb_producer_details", "tmdb_show", "tmdb_show_details",
+    "tmdb_top_rated", "tmdb_trending_daily", "tmdb_trending_weekly", "tmdb_writer", "tmdb_writer_details"
 ]
 type_map = {
-    "tmdb_actor": "Person",
-    "tmdb_actor_details": "Person",
-    "tmdb_collection": "Collection",
-    "tmdb_collection_details": "Collection",
-    "tmdb_company": "Company",
-    "tmdb_crew": "Person",
-    "tmdb_crew_details": "Person",
-    "tmdb_director": "Person",
-    "tmdb_director_details": "Person",
-    "tmdb_keyword": "Keyword",
-    "tmdb_list": "List",
-    "tmdb_list_details": "List",
-    "tmdb_movie": "Movie",
-    "tmdb_movie_details": "Movie",
-    "tmdb_network": "Network",
-    "tmdb_person": "Person",
-    "tmdb_producer": "Person",
-    "tmdb_producer_details": "Person",
-    "tmdb_show": "Show",
-    "tmdb_show_details": "Show",
-    "tmdb_writer": "Person",
-    "tmdb_writer_details": "Person"
+    "tmdb_actor": "Person", "tmdb_actor_details": "Person", "tmdb_crew": "Person", "tmdb_crew_details": "Person",
+    "tmdb_collection": "Collection", "tmdb_collection_details": "Collection", "tmdb_company": "Company",
+    "tmdb_director": "Person", "tmdb_director_details": "Person", "tmdb_keyword": "Keyword",
+    "tmdb_list": "List", "tmdb_list_details": "List", "tmdb_movie": "Movie", "tmdb_movie_details": "Movie",
+    "tmdb_network": "Network", "tmdb_person": "Person", "tmdb_producer": "Person", "tmdb_producer_details": "Person",
+    "tmdb_show": "Show", "tmdb_show_details": "Show", "tmdb_writer": "Person", "tmdb_writer_details": "Person"
 }
-discover_movie = [
-    "language", "with_original_language", "region", "sort_by",
-    "certification_country", "certification", "certification.lte", "certification.gte",
-    "include_adult",
-    "primary_release_year", "primary_release_date.gte", "primary_release_date.lte",
-    "release_date.gte", "release_date.lte", "year",
-    "vote_count.gte", "vote_count.lte",
-    "vote_average.gte", "vote_average.lte",
-    "with_cast", "with_crew", "with_people",
-    "with_companies",
-    "with_genres", "without_genres",
-    "with_keywords", "without_keywords",
-    "with_runtime.gte", "with_runtime.lte"
-]
-discover_tv = [
-    "language", "with_original_language", "timezone", "sort_by",
-    "air_date.gte", "air_date.lte",
-    "first_air_date.gte", "first_air_date.lte", "first_air_date_year",
-    "vote_count.gte", "vote_count.lte",
-    "vote_average.gte", "vote_average.lte",
-    "with_genres", "without_genres",
-    "with_keywords", "without_keywords",
-    "with_networks", "with_companies",
-    "with_runtime.gte", "with_runtime.lte",
-    "include_null_first_air_dates",
-    "screened_theatrically"
-]
+discover_all = [
+    "language", "with_original_language", "region", "sort_by", "with_cast", "with_crew", "with_people",
+    "certification_country", "certification", "certification.lte", "certification.gte",
+    "year", "primary_release_year", "primary_release_date.gte", "primary_release_date.lte",
+    "release_date.gte", "release_date.lte", "vote_count.gte", "vote_count.lte",
+    "vote_average.gte", "vote_average.lte", "with_runtime.gte", "with_runtime.lte",
+    "with_companies", "with_genres", "without_genres", "with_keywords", "without_keywords", "include_adult",
+    "timezone", "screened_theatrically", "include_null_first_air_dates", "limit",
+    "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte", "first_air_date_year", "with_networks"
+]
+discover_movie_only = [
+    "region", "with_cast", "with_crew", "with_people", "certification_country", "certification",
+    "year", "primary_release_year", "primary_release_date", "release_date", "include_adult"
+]
+discover_tv_only = [
+    "timezone", "screened_theatrically", "include_null_first_air_dates",
+    "air_date", "first_air_date", "first_air_date_year", "with_networks",
+]
 discover_dates = [
-    "primary_release_date.gte", "primary_release_date.lte",
-    "release_date.gte", "release_date.lte",
-    "air_date.gte", "air_date.lte",
-    "first_air_date.gte", "first_air_date.lte"
+    "primary_release_date.gte", "primary_release_date.lte", "release_date.gte", "release_date.lte",
+    "air_date.gte", "air_date.lte", "first_air_date.gte", "first_air_date.lte"
 ]
 discover_movie_sort = [
-    "popularity.asc", "popularity.desc",
-    "release_date.asc", "release_date.desc",
-    "revenue.asc", "revenue.desc",
-    "primary_release_date.asc", "primary_release_date.desc",
-    "original_title.asc", "original_title.desc",
-    "vote_average.asc", "vote_average.desc",
-    "vote_count.asc", "vote_count.desc"
-]
-discover_tv_sort = [
-    "vote_average.desc", "vote_average.asc",
-    "first_air_date.desc", "first_air_date.asc",
-    "popularity.desc", "popularity.asc"
+    "popularity.asc", "popularity.desc", "release_date.asc", "release_date.desc", "revenue.asc", "revenue.desc",
+    "primary_release_date.asc", "primary_release_date.desc", "original_title.asc", "original_title.desc",
+    "vote_average.asc", "vote_average.desc", "vote_count.asc", "vote_count.desc"
 ]
+discover_tv_sort = ["vote_average.desc", "vote_average.asc", "first_air_date.desc", "first_air_date.asc", "popularity.desc", "popularity.asc"]
 
 class TMDb:
     def __init__(self, config, params):
         self.config = config
-        self.TMDb = tmdbv3api.TMDb()
+        self.TMDb = tmdbv3api.TMDb(session=self.config.session)
         self.TMDb.api_key = params["apikey"]
         self.TMDb.language = params["language"]
         response = tmdbv3api.Configuration().info()
@@ -137,16 +79,29 @@ class TMDb:
             id_to_return = self.Movie.external_ids(tmdb_id)[convert_to] if is_movie else self.TV.external_ids(tmdb_id)[convert_to]
             if not id_to_return or (convert_to == "tvdb_id" and id_to_return == 0):
                 raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}")
-            return id_to_return
+            return id_to_return if convert_to == "imdb_id" else int(id_to_return)
         except TMDbException:
             raise Failed(f"TMDb Error: TMDb {'Movie' if is_movie else 'Show'} ID: {tmdb_id} not found")
 
-    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
-    def convert_to(self, external_id, external_source, is_movie):
-        search_results = self.Movie.external(external_id=external_id, external_source=external_source)
-        search = search_results["movie_results" if is_movie else "tv_results"]
-        if len(search) == 1: return int(search[0]["id"])
-        else: raise Failed(f"TMDb Error: No TMDb ID found for {external_source.upper().replace('B_', 'b ')} {external_id}")
+    def convert_to(self, external_id, external_source):
+        return self.Movie.external(external_id=external_id, external_source=external_source)
+
+    def convert_tvdb_to(self, tvdb_id):
+        search = self.convert_to(tvdb_id, "tvdb_id")
+        if len(search["tv_results"]) == 1:
+            return int(search["tv_results"][0]["id"])
+        else:
+            raise Failed(f"TMDb Error: No TMDb ID found for TVDb ID {tvdb_id}")
+
+    def convert_imdb_to(self, imdb_id):
+        search = self.convert_to(imdb_id, "imdb_id")
+        if len(search["movie_results"]) > 0:
+            return int(search["movie_results"][0]["id"]), "movie"
+        elif len(search["tv_results"]) > 0:
+            return int(search["tv_results"][0]["id"]), "show"
+        else:
+            raise Failed(f"TMDb Error: No TMDb ID found for IMDb ID {imdb_id}")
 
     def get_movie_show_or_collection(self, tmdb_id, is_movie):
         if is_movie:
@@ -202,35 +157,27 @@ class TMDb:
         except TMDbException as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}")
 
     def _credits(self, tmdb_id, actor=False, crew=False, director=False, producer=False, writer=False):
-        movie_ids = []
-        show_ids = []
+        ids = []
         actor_credits = self._person_credits(tmdb_id)
         if actor:
             for credit in actor_credits.cast:
                 if credit.media_type == "movie":
-                    movie_ids.append(credit.id)
+                    ids.append((credit.id, "tmdb"))
                 elif credit.media_type == "tv":
-                    try:
-                        show_ids.append(self.config.Convert.tmdb_to_tvdb(credit.id, fail=True))
-                    except Failed as e:
-                        logger.warning(e)
+                    ids.append((credit.id, "tmdb_show"))
         for credit in actor_credits.crew:
             if crew or \
                     (director and credit.department == "Directing") or \
                     (producer and credit.department == "Production") or \
                     (writer and credit.department == "Writing"):
                 if credit.media_type == "movie":
-                    movie_ids.append(credit.id)
+                    ids.append((credit.id, "tmdb"))
                 elif credit.media_type == "tv":
-                    try:
-                        show_ids.append(self.config.Convert.tmdb_to_tvdb(credit.id, fail=True))
-                    except Failed as e:
-                        logger.warning(e)
-        return movie_ids, show_ids
+                    ids.append((credit.id, "tmdb_show"))
+        return ids
 
     def _pagenation(self, method, amount, is_movie):
         ids = []
-        count = 0
         for x in range(int(amount / 20) + 1):
             if method == "tmdb_popular": tmdb_items = self.Movie.popular(x + 1) if is_movie else self.TV.popular(x + 1)
             elif method == "tmdb_top_rated": tmdb_items = self.Movie.top_rated(x + 1) if is_movie else self.TV.top_rated(x + 1)
@@ -240,21 +187,18 @@ class TMDb:
             else: raise Failed(f"TMDb Error: {method} method not supported")
             for tmdb_item in tmdb_items:
-                try:
-                    ids.append(tmdb_item.id if is_movie else self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True))
-                    count += 1
-                except Failed as e:
-                    logger.error(e)
-                    pass
-                if count == amount: break
-            if count == amount: break
+                ids.append((tmdb_item.id, "tmdb" if is_movie else "tmdb_show"))
+                if len(ids) == amount: break
+            if len(ids) == amount: break
         return ids
 
     def _discover(self, attrs, amount, is_movie):
         ids = []
-        count = 0
         for date_attr in discover_dates:
             if date_attr in attrs:
-                attrs[date_attr] = datetime.strftime(datetime.strptime(attrs[date_attr], "%m/%d/%Y"), "%Y-%m-%d")
+                attrs[date_attr] = util.validate_date(attrs[date_attr], f"tmdb_discover attribute {date_attr}", return_as="%Y-%m-%d")
         self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
         total_pages = int(self.TMDb.total_pages)
         total_results = int(self.TMDb.total_results)
@@ -264,24 +208,24 @@ class TMDb:
             tmdb_items = self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
             for tmdb_item in tmdb_items:
-                try:
-                    ids.append(tmdb_item.id if is_movie else self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True))
-                    count += 1
-                except Failed as e:
-                    logger.error(e)
-                    pass
-                if count == amount: break
-            if count == amount: break
+                ids.append((tmdb_item.id, "tmdb" if is_movie else "tmdb_show"))
+                if len(ids) == amount: break
+            if len(ids) == amount: break
         return ids, amount
 
-    def validate_tmdb_list(self, tmdb_list, tmdb_type):
+    def validate_tmdb_ids(self, tmdb_ids, tmdb_method):
+        tmdb_list = util.get_int_list(tmdb_ids, f"TMDb {type_map[tmdb_method]} ID")
         tmdb_values = []
         for tmdb_id in tmdb_list:
-            try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_type))
+            try: tmdb_values.append(self.validate_tmdb(tmdb_id, tmdb_method))
            except Failed as e: logger.error(e)
         if len(tmdb_values) == 0: raise Failed(f"TMDb Error: No valid TMDb IDs in {tmdb_list}")
         return tmdb_values
 
-    def validate_tmdb(self, tmdb_id, tmdb_type):
+    def validate_tmdb(self, tmdb_id, tmdb_method):
+        tmdb_type = type_map[tmdb_method]
         if tmdb_type == "Movie": self.get_movie(tmdb_id)
         elif tmdb_type == "Show": self.get_show(tmdb_id)
         elif tmdb_type == "Collection": self.get_collection(tmdb_id)
@@ -291,11 +235,10 @@ class TMDb:
         elif tmdb_type == "List": self.get_list(tmdb_id)
         return tmdb_id
 
-    def get_items(self, method, data, is_movie):
-        pretty = util.pretty_names[method] if method in util.pretty_names else method
+    def get_tmdb_ids(self, method, data, is_movie):
+        pretty = method.replace("_", " ").title().replace("Tmdb", "TMDb")
         media_type = "Movie" if is_movie else "Show"
-        movie_ids = []
-        show_ids = []
+        ids = []
         if method in ["tmdb_discover", "tmdb_company", "tmdb_keyword"] or (method == "tmdb_network" and not is_movie):
             attrs = None
             tmdb_id = ""
@@ -315,8 +258,7 @@ class TMDb:
             else:
                 attrs = data.copy()
                 limit = int(attrs.pop("limit"))
-                if is_movie: movie_ids, amount = self._discover(attrs, limit, is_movie)
-                else: show_ids, amount = self._discover(attrs, limit, is_movie)
+                ids, amount = self._discover(attrs, limit, is_movie)
             if method in ["tmdb_company", "tmdb_network", "tmdb_keyword"]:
                 logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({amount} {media_type}{'' if amount == 1 else 's'})")
             elif method == "tmdb_discover":
@@ -324,8 +266,7 @@ class TMDb:
                 for attr, value in attrs.items():
                     logger.info(f" {attr}: {value}")
         elif method in ["tmdb_popular", "tmdb_top_rated", "tmdb_now_playing", "tmdb_trending_daily", "tmdb_trending_weekly"]:
-            if is_movie: movie_ids = self._pagenation(method, data, is_movie)
-            else: show_ids = self._pagenation(method, data, is_movie)
+            ids = self._pagenation(method, data, is_movie)
             logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
         else:
             tmdb_id = int(data)
@@ -334,34 +275,31 @@ class TMDb:
                 tmdb_name = tmdb_list.name
                 for tmdb_item in tmdb_list.items:
                     if tmdb_item.media_type == "movie":
-                        movie_ids.append(tmdb_item.id)
+                        ids.append((tmdb_item.id, "tmdb"))
                     elif tmdb_item.media_type == "tv":
-                        try: show_ids.append(self.config.Convert.tmdb_to_tvdb(tmdb_item.id, fail=True))
-                        except Failed: pass
+                        try:
+                            ids.append((tmdb_item.id, "tmdb_show"))
+                        except Failed:
+                            pass
             elif method == "tmdb_movie":
                 tmdb_name = str(self.get_movie(tmdb_id).title)
-                movie_ids.append(tmdb_id)
+                ids.append((tmdb_id, "tmdb"))
             elif method == "tmdb_collection":
                 tmdb_items = self.get_collection(tmdb_id)
                 tmdb_name = str(tmdb_items.name)
                 for tmdb_item in tmdb_items.parts:
-                    movie_ids.append(tmdb_item["id"])
+                    ids.append((tmdb_item["id"], "tmdb"))
             elif method == "tmdb_show":
                 tmdb_name = str(self.get_show(tmdb_id).name)
-                show_ids.append(self.config.Convert.tmdb_to_tvdb(tmdb_id, fail=True))
+                ids.append((tmdb_id, "tmdb_show"))
             else:
                 tmdb_name = str(self.get_person(tmdb_id).name)
-                if method == "tmdb_actor": movie_ids, show_ids = self._credits(tmdb_id, actor=True)
-                elif method == "tmdb_director": movie_ids, show_ids = self._credits(tmdb_id, director=True)
-                elif method == "tmdb_producer": movie_ids, show_ids = self._credits(tmdb_id, producer=True)
-                elif method == "tmdb_writer": movie_ids, show_ids = self._credits(tmdb_id, writer=True)
-                elif method == "tmdb_crew": movie_ids, show_ids = self._credits(tmdb_id, crew=True)
+                if method == "tmdb_actor": ids = self._credits(tmdb_id, actor=True)
+                elif method == "tmdb_director": ids = self._credits(tmdb_id, director=True)
+                elif method == "tmdb_producer": ids = self._credits(tmdb_id, producer=True)
+                elif method == "tmdb_writer": ids = self._credits(tmdb_id, writer=True)
+                elif method == "tmdb_crew": ids = self._credits(tmdb_id, crew=True)
                 else: raise Failed(f"TMDb Error: Method {method} not supported")
-        if len(movie_ids) > 0:
-            logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(movie_ids)} Movie{'' if len(movie_ids) == 1 else 's'})")
-        if not is_movie and len(show_ids) > 0:
-            logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(show_ids)} Show{'' if len(show_ids) == 1 else 's'})")
-        logger.debug("")
-        logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
-        logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
-        return movie_ids, show_ids
+        if len(ids) > 0:
+            logger.info(f"Processing {pretty}: ({tmdb_id}) {tmdb_name} ({len(ids)} Item{'' if len(ids) == 1 else 's'})")
+        return ids
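The thread running through this whole file is the return-type change: instead of keeping separate movie_ids/show_ids lists and converting TMDb show IDs to TVDb inline (where a failed conversion was silently logged and dropped), every builder now yields uniform (id, id_type) tuples and defers conversion to the caller. A consumer can then split or convert in one place, along these lines (a sketch, not the committed caller):

def split_ids(ids):
    movie_ids, show_ids = [], []
    for item_id, id_type in ids:
        if id_type == "tmdb":
            movie_ids.append(item_id)
        elif id_type == "tmdb_show":
            show_ids.append(item_id)  # TVDb conversion now happens here, once
    return movie_ids, show_ids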
223 modules/trakt.py Normal file
@@ -0,0 +1,223 @@
+import logging, requests, webbrowser
+from modules import util
+from modules.util import Failed, TimeoutExpired
+from ruamel import yaml
+
+logger = logging.getLogger("Plex Meta Manager")
+
+redirect_uri = "urn:ietf:wg:oauth:2.0:oob"
+redirect_uri_encoded = redirect_uri.replace(":", "%3A")
+base_url = "https://api.trakt.tv"
+builders = [
+    "trakt_collected", "trakt_collection", "trakt_list", "trakt_list_details", "trakt_popular",
+    "trakt_recommended", "trakt_trending", "trakt_watched", "trakt_watchlist"
+]
+sorts = [
+    "rank", "added", "title", "released", "runtime", "popularity",
+    "percentage", "votes", "random", "my_rating", "watched", "collected"
+]
+
+class Trakt:
+    def __init__(self, config, params):
+        self.config = config
+        self.client_id = params["client_id"]
+        self.client_secret = params["client_secret"]
+        self.config_path = params["config_path"]
+        self.authorization = params["authorization"]
+        if not self._save(self.authorization):
+            if not self._refresh():
+                self._authorization()
+
+    def _authorization(self):
+        url = f"https://trakt.tv/oauth/authorize?response_type=code&client_id={self.client_id}&redirect_uri={redirect_uri_encoded}"
+        logger.info(f"Navigate to: {url}")
+        logger.info("If you get an OAuth error your client_id or client_secret is invalid")
+        webbrowser.open(url, new=2)
+        try: pin = util.logger_input("Trakt pin (case insensitive)", timeout=300).strip()
+        except TimeoutExpired: raise Failed("Input Timeout: Trakt pin required.")
+        if not pin: raise Failed("Trakt Error: No input Trakt pin required.")
+        json = {
+            "code": pin,
+            "client_id": self.client_id,
+            "client_secret": self.client_secret,
+            "redirect_uri": redirect_uri,
+            "grant_type": "authorization_code"
+        }
+        response = self.config.post(f"{base_url}/oauth/token", json=json, headers={"Content-Type": "application/json"})
+        if response.status_code != 200:
+            raise Failed("Trakt Error: Invalid trakt pin. If you're sure you typed it in correctly your client_id or client_secret may be invalid")
+        elif not self._save(response.json()):
+            raise Failed("Trakt Error: New Authorization Failed")
+
+    def _check(self, authorization=None):
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.authorization['access_token'] if authorization is None else authorization['access_token']}",
+            "trakt-api-version": "2",
+            "trakt-api-key": self.client_id
+        }
+        response = self.config.get(f"{base_url}/users/settings", headers=headers)
+        return response.status_code == 200
+
+    def _refresh(self):
+        if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]:
+            logger.info("Refreshing Access Token...")
+            json = {
+                "refresh_token": self.authorization["refresh_token"],
+                "client_id": self.client_id,
+                "client_secret": self.client_secret,
+                "redirect_uri": redirect_uri,
+                "grant_type": "refresh_token"
+            }
+            response = self.config.post(f"{base_url}/oauth/token", json=json, headers={"Content-Type": "application/json"})
+            if response.status_code != 200:
+                return False
+            return self._save(response.json())
+        return False
+
+    def _save(self, authorization):
+        if authorization and self._check(authorization):
+            if self.authorization != authorization:
+                yaml.YAML().allow_duplicate_keys = True
+                config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
+                config["trakt"]["authorization"] = {
+                    "access_token": authorization["access_token"],
+                    "token_type": authorization["token_type"],
+                    "expires_in": authorization["expires_in"],
+                    "refresh_token": authorization["refresh_token"],
+                    "scope": authorization["scope"],
+                    "created_at": authorization["created_at"]
+                }
+                logger.info(f"Saving authorization information to {self.config_path}")
+                yaml.round_trip_dump(config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
+                self.authorization = authorization
+            return True
+        return False
+
+    def _request(self, url):
+        headers = {
+            "Content-Type": "application/json",
+            "Authorization": f"Bearer {self.authorization['access_token']}",
+            "trakt-api-version": "2",
+            "trakt-api-key": self.client_id
+        }
+        output_json = []
+        pages = 1
+        current = 1
+        while current <= pages:
+            if pages == 1:
+                response = self.config.get(f"{base_url}{url}", headers=headers)
+                if "X-Pagination-Page-Count" in response.headers and "?" not in url:
+                    pages = int(response.headers["X-Pagination-Page-Count"])
+            else:
+                response = self.config.get(f"{base_url}{url}?page={current}", headers=headers)
+            if response.status_code == 200:
+                json_data = response.json()
+                if isinstance(json_data, dict):
+                    return json_data
+                else:
+                    output_json.extend(response.json())
+            else:
+                raise Failed(f"({response.status_code}) {response.reason}")
+            current += 1
+        return output_json
+
+    def user_ratings(self, is_movie):
+        media = "movie" if is_movie else "show"
+        id_type = "tmdb" if is_movie else "tvdb"
+        return {int(i[media]["ids"][id_type]): i["rating"] for i in self._request(f"/users/me/ratings/{media}s")}
+
+    def convert(self, external_id, from_source, to_source, media_type):
+        path = f"/search/{from_source}/{external_id}"
+        if from_source in ["tmdb", "tvdb"]:
+            path = f"{path}?type={media_type}"
+        lookup = self._request(path)
+        if lookup and media_type in lookup[0] and to_source in lookup[0][media_type]["ids"]:
+            return lookup[0][media_type]["ids"][to_source]
+        raise Failed(f"Trakt Error: No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID: {external_id}")
+
+    def list_description(self, data):
+        try:
+            return self._request(requests.utils.urlparse(data).path)["description"]
+        except Failed:
+            raise Failed(f"Trakt Error: List {data} not found")
+
+    def _parse(self, items, top=True, item_type=None):
+        ids = []
+        for item in items:
+            if top:
+                if item_type:
+                    data = item[item_type]
+                elif item["type"] in ["movie", "show"]:
+                    data = item[item["type"]]
+                else:
+                    continue
+            else:
+                data = item
+            if item_type:
+                id_type = "TMDb" if item_type == "movie" else "TVDb"
+            else:
+                id_type = "TMDb" if item["type"] == "movie" else "TVDb"
+            if data["ids"][id_type.lower()]:
+                ids.append((data["ids"][id_type.lower()], id_type.lower()))
+            else:
+                logger.error(f"Trakt Error: No {id_type} ID found for {data['title']} ({data['year']})")
+        return ids
+
+    def _user_list(self, data):
+        try:
+            items = self._request(f"{requests.utils.urlparse(data).path}/items")
+        except Failed:
+            raise Failed(f"Trakt Error: List {data} not found")
+        if len(items) == 0:
+            raise Failed(f"Trakt Error: List {data} is empty")
+        return self._parse(items)
+
+    def _user_items(self, list_type, data, is_movie):
+        try:
+            items = self._request(f"/users/{data}/{list_type}/{'movies' if is_movie else 'shows'}")
+        except Failed:
+            raise Failed(f"Trakt Error: User {data} not found")
+        if len(items) == 0:
+            raise Failed(f"Trakt Error: {data}'s {list_type.capitalize()} is empty")
+        return self._parse(items, item_type="movie" if is_movie else "show")
+
+    def _pagenation(self, pagenation, amount, is_movie):
+        items = self._request(f"/{'movies' if is_movie else 'shows'}/{pagenation}?limit={amount}")
+        return self._parse(items, top=pagenation != "popular", item_type="movie" if is_movie else "show")
+
+    def validate_trakt(self, trakt_lists, is_movie, trakt_type="list"):
+        values = util.get_list(trakt_lists, split=False)
+        trakt_values = []
+        for value in values:
+            try:
+                if trakt_type == "list":
+                    self._user_list(value)
+                else:
+                    self._user_items(trakt_type, value, is_movie)
+                trakt_values.append(value)
+            except Failed as e:
+                logger.error(e)
+        if len(trakt_values) == 0:
+            if trakt_type == "watchlist":
+                raise Failed(f"Trakt Error: No valid Trakt Watchlists in {values}")
+            elif trakt_type == "collection":
+                raise Failed(f"Trakt Error: No valid Trakt Collections in {values}")
+            else:
+                raise Failed(f"Trakt Error: No valid Trakt Lists in {values}")
+        return trakt_values
+
+    def get_trakt_ids(self, method, data, is_movie):
+        pretty = method.replace("_", " ").title()
+        media_type = "Movie" if is_movie else "Show"
+        if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:
+            logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
+            return self._pagenation(method[6:], data, is_movie)
+        elif method in ["trakt_collection", "trakt_watchlist"]:
+            logger.info(f"Processing {pretty} {media_type}s for {data}")
+            return self._user_items(method[6:], data, is_movie)
+        elif method == "trakt_list":
+            logger.info(f"Processing {pretty}: {data}")
+            return self._user_list(data)
+        else:
+            raise Failed(f"Trakt Error: Method {method} not supported")
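The rewritten module drops the trakt.py client library (removed below) in favor of direct REST calls, paging through list endpoints with Trakt's X-Pagination-Page-Count response header. The essence of that loop as a standalone sketch (headers abbreviated; the real _request also skips paging for URLs that already carry a query string):

import requests

def fetch_all(path, headers):
    results, page, pages = [], 1, 1
    while page <= pages:
        r = requests.get(f"https://api.trakt.tv{path}", params={"page": page}, headers=headers)
        r.raise_for_status()
        pages = int(r.headers.get("X-Pagination-Page-Count", pages))
        results.extend(r.json())
        page += 1
    return results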
@@ -1,186 +0,0 @@
-import logging, requests, webbrowser
-from modules import util
-from modules.util import Failed, TimeoutExpired
-from retrying import retry
-from ruamel import yaml
-from trakt import Trakt as TraktAPI
-from trakt.objects.episode import Episode
-from trakt.objects.movie import Movie
-from trakt.objects.season import Season
-from trakt.objects.show import Show
-
-logger = logging.getLogger("Plex Meta Manager")
-
-builders = [
-    "trakt_collected",
-    "trakt_collection",
-    "trakt_list",
-    "trakt_list_details",
-    "trakt_popular",
-    "trakt_recommended",
-    "trakt_trending",
-    "trakt_watched",
-    "trakt_watchlist"
-]
-
-class Trakt:
-    def __init__(self, params, authorization=None):
-        self.base_url = "https://api.trakt.tv"
-        self.redirect_uri = "urn:ietf:wg:oauth:2.0:oob"
-        self.aliases = {
-            "trakt_trending": "Trakt Trending",
-            "trakt_watchlist": "Trakt Watchlist",
-            "trakt_list": "Trakt List"
-        }
-        self.client_id = params["client_id"]
-        self.client_secret = params["client_secret"]
-        self.config_path = params["config_path"]
-        self.authorization = authorization
-        TraktAPI.configuration.defaults.client(self.client_id, self.client_secret)
-        if not self._save(self.authorization):
-            if not self._refresh():
-                self._authorization()
-
-    def _authorization(self):
-        url = TraktAPI["oauth"].authorize_url(self.redirect_uri)
-        logger.info(f"Navigate to: {url}")
-        logger.info("If you get an OAuth error your client_id or client_secret is invalid")
-        webbrowser.open(url, new=2)
-        try: pin = util.logger_input("Trakt pin (case insensitive)", timeout=300).strip()
-        except TimeoutExpired: raise Failed("Input Timeout: Trakt pin required.")
-        if not pin: raise Failed("Trakt Error: No input Trakt pin required.")
-        new_authorization = TraktAPI["oauth"].token(pin, self.redirect_uri)
-        if not new_authorization:
-            raise Failed("Trakt Error: Invalid trakt pin. If you're sure you typed it in correctly your client_id or client_secret may be invalid")
-        if not self._save(new_authorization):
-            raise Failed("Trakt Error: New Authorization Failed")
-
-    def _check(self, authorization):
-        try:
-            with TraktAPI.configuration.oauth.from_response(authorization, refresh=True):
-                if TraktAPI["users/settings"].get():
-                    return True
-        except ValueError: pass
-        return False
-
-    def _refresh(self):
-        if self.authorization and "refresh_token" in self.authorization and self.authorization["refresh_token"]:
-            logger.info("Refreshing Access Token...")
-            refreshed_authorization = TraktAPI["oauth"].token_refresh(self.authorization["refresh_token"], self.redirect_uri)
-            return self._save(refreshed_authorization)
-        return False
-
-    def _save(self, authorization):
-        if authorization and self._check(authorization):
-            if self.authorization != authorization:
-                yaml.YAML().allow_duplicate_keys = True
-                config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path))
-                config["trakt"]["authorization"] = {
-                    "access_token": authorization["access_token"],
-                    "token_type": authorization["token_type"],
-                    "expires_in": authorization["expires_in"],
-                    "refresh_token": authorization["refresh_token"],
-                    "scope": authorization["scope"],
-                    "created_at": authorization["created_at"]
-                }
-                logger.info(f"Saving authorization information to {self.config_path}")
-                yaml.round_trip_dump(config, open(self.config_path, "w"), indent=ind, block_seq_indent=bsi)
-                self.authorization = authorization
-            TraktAPI.configuration.defaults.oauth.from_response(self.authorization)
-            return True
-        return False
-
-    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
-    def convert(self, external_id, from_source, to_source, media_type):
-        lookup = TraktAPI["search"].lookup(external_id, from_source, media_type)
-        if lookup:
-            lookup = lookup[0] if isinstance(lookup, list) else lookup
-            if lookup.get_key(to_source):
-                return lookup.get_key(to_source) if to_source == "imdb" else int(lookup.get_key(to_source))
-        raise Failed(f"Trakt Error: No {to_source.upper().replace('B', 'b')} ID found for {from_source.upper().replace('B', 'b')} ID: {external_id}")
-
-    def collection(self, data, is_movie):
-        return self._user_list("collection", data, is_movie)
-
-    def _watchlist(self, data, is_movie):
-        return self._user_list("watchlist", data, is_movie)
-
-    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
-    def _user_list(self, list_type, data, is_movie):
-        items = TraktAPI[f"users/{data}/{list_type}"].movies() if is_movie else TraktAPI[f"users/{data}/{list_type}"].shows()
-        if items is None: raise Failed("Trakt Error: No List found")
-        else: return [i for i in items]
-
-    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
-    def standard_list(self, data):
-        try: trakt_list = TraktAPI[requests.utils.urlparse(data).path].get()
-        except AttributeError: trakt_list = None
-        if trakt_list is None: raise Failed("Trakt Error: No List found")
-        else: return trakt_list
-
-    @retry(stop_max_attempt_number=6, wait_fixed=10000)
-    def _request(self, url):
-        return requests.get(url, headers={"Content-Type": "application/json", "trakt-api-version": "2", "trakt-api-key": self.client_id}).json()
-
-    def _collection(self, username, is_movie):
-        items = self._request(f"{self.base_url}/users/{username}/collection/{'movies' if is_movie else 'shows'}")
-        if is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], []
-        else: return [], [item["show"]["ids"]["tvdb"] for item in items]
-
-    def _pagenation(self, pagenation, amount, is_movie):
-        items = self._request(f"{self.base_url}/{'movies' if is_movie else 'shows'}/{pagenation}?limit={amount}")
-        if pagenation == "popular" and is_movie: return [item["ids"]["tmdb"] for item in items], []
-        elif pagenation == "popular": return [], [item["ids"]["tvdb"] for item in items]
-        elif is_movie: return [item["movie"]["ids"]["tmdb"] for item in items], []
-        else: return [], [item["show"]["ids"]["tvdb"] for item in items]
-
-    def validate_trakt(self, values, trakt_type=None, is_movie=None):
-        trakt_values = []
-        for value in values:
-            try:
-                if trakt_type == "watchlist" and is_movie is not None:
-                    self._watchlist(value, is_movie)
-                elif trakt_type == "collection" and is_movie is not None:
-                    self._collection(value, is_movie)
-                else:
-                    self.standard_list(value)
-                trakt_values.append(value)
-            except Failed as e:
-                logger.error(e)
-        if len(trakt_values) == 0:
-            if trakt_type == "watchlist" and is_movie is not None:
-                raise Failed(f"Trakt Error: No valid Trakt Watchlists in {values}")
-            elif trakt_type == "collection" and is_movie is not None:
-                raise Failed(f"Trakt Error: No valid Trakt Collections in {values}")
-            else:
-                raise Failed(f"Trakt Error: No valid Trakt Lists in {values}")
-        return trakt_values
-
-    def get_items(self, method, data, is_movie):
-        pretty = self.aliases[method] if method in self.aliases else method
-        media_type = "Movie" if is_movie else "Show"
-        if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:
-            movie_ids, show_ids = self._pagenation(method[6:], data, is_movie)
-            logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
-        elif method == "trakt_collection":
-            movie_ids, show_ids = self._collection(data, is_movie)
-            logger.info(f"Processing {pretty} {media_type}s for {data}")
-        else:
-            show_ids = []
-            movie_ids = []
-            if method == "trakt_watchlist": trakt_items = self._watchlist(data, is_movie)
-            elif method == "trakt_list": trakt_items = self.standard_list(data).items()
-            else: raise Failed(f"Trakt Error: Method {method} not supported")
-            logger.info(f"Processing {pretty}: {data}")
-            for trakt_item in trakt_items:
-                if isinstance(trakt_item, Movie):
-                    movie_ids.append(int(trakt_item.get_key("tmdb")))
-                elif isinstance(trakt_item, Show) and trakt_item.pk[1] not in show_ids:
-                    show_ids.append(int(trakt_item.pk[1]))
-                elif (isinstance(trakt_item, (Season, Episode))) and trakt_item.show.pk[1] not in show_ids:
-                    show_ids.append(int(trakt_item.show.pk[1]))
-            logger.debug(f"Trakt {media_type} Found: {trakt_items}")
-        logger.debug("")
-        logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
-        logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
-        return movie_ids, show_ids
188 modules/tvdb.py
@@ -1,169 +1,163 @@
-import logging, requests
-from lxml import html
+import logging, requests, time
 from modules import util
 from modules.util import Failed
-from retrying import retry
 
 logger = logging.getLogger("Plex Meta Manager")
 
-builders = [
-    "tvdb_list",
-    "tvdb_list_details",
-    "tvdb_movie",
-    "tvdb_movie_details",
-    "tvdb_show",
-    "tvdb_show_details"
-]
+builders = ["tvdb_list", "tvdb_list_details", "tvdb_movie", "tvdb_movie_details", "tvdb_show", "tvdb_show_details"]
+base_url = "https://www.thetvdb.com"
+alt_url = "https://thetvdb.com"
+urls = {
+    "list": f"{base_url}/lists/", "alt_list": f"{alt_url}/lists/",
+    "series": f"{base_url}/series/", "alt_series": f"{alt_url}/series/",
+    "movies": f"{base_url}/movies/", "alt_movies": f"{alt_url}/movies/",
+    "series_id": f"{base_url}/dereferrer/series/", "movie_id": f"{base_url}/dereferrer/movie/"
+}
 
 class TVDbObj:
-    def __init__(self, tvdb_url, language, is_movie, TVDb):
-        tvdb_url = tvdb_url.strip()
-        if not is_movie and tvdb_url.startswith((TVDb.series_url, TVDb.alt_series_url, TVDb.series_id_url)):
+    def __init__(self, tvdb_url, language, is_movie, config):
+        self.tvdb_url = tvdb_url.strip()
+        self.language = language
+        self.is_movie = is_movie
+        self.config = config
+        if not self.is_movie and self.tvdb_url.startswith((urls["series"], urls["alt_series"], urls["series_id"])):
            self.media_type = "Series"
-        elif is_movie and tvdb_url.startswith((TVDb.movies_url, TVDb.alt_movies_url, TVDb.movie_id_url)):
+        elif self.is_movie and self.tvdb_url.startswith((urls["movies"], urls["alt_movies"], urls["movie_id"])):
             self.media_type = "Movie"
         else:
-            raise Failed(f"TVDb Error: {tvdb_url} must begin with {TVDb.movies_url if is_movie else TVDb.series_url}")
+            raise Failed(f"TVDb Error: {self.tvdb_url} must begin with {urls['movies'] if self.is_movie else urls['series']}")
 
-        response = TVDb._request(tvdb_url, language)
+        response = self.config.get_html(self.tvdb_url, headers=util.header(self.language))
         results = response.xpath(f"//*[text()='TheTVDB.com {self.media_type} ID']/parent::node()/span/text()")
         if len(results) > 0:
             self.id = int(results[0])
-        elif tvdb_url.startswith(TVDb.movie_id_url):
-            raise Failed(f"TVDb Error: Could not find a TVDb Movie using TVDb Movie ID: {tvdb_url[len(TVDb.movie_id_url):]}")
-        elif tvdb_url.startswith(TVDb.series_id_url):
-            raise Failed(f"TVDb Error: Could not find a TVDb Series using TVDb Series ID: {tvdb_url[len(TVDb.series_id_url):]}")
+        elif self.tvdb_url.startswith(urls["movie_id"]):
+            raise Failed(f"TVDb Error: Could not find a TVDb Movie using TVDb Movie ID: {self.tvdb_url[len(urls['movie_id']):]}")
+        elif self.tvdb_url.startswith(urls["series_id"]):
+            raise Failed(f"TVDb Error: Could not find a TVDb Series using TVDb Series ID: {self.tvdb_url[len(urls['series_id']):]}")
         else:
-            raise Failed(f"TVDb Error: Could not find a TVDb {self.media_type} ID at the URL {tvdb_url}")
+            raise Failed(f"TVDb Error: Could not find a TVDb {self.media_type} ID at the URL {self.tvdb_url}")
 
-        results = response.xpath("//div[@class='change_translation_text' and @data-language='eng']/@data-title")
-        if len(results) > 0 and len(results[0]) > 0:
-            self.title = results[0]
+        def parse_page(xpath, fail=None, multi=False):
+            parse_results = response.xpath(xpath)
+            if len(parse_results) > 0:
+                parse_results = [r.strip() for r in parse_results if len(r) > 0]
+            if not multi and len(parse_results) > 0:
+                return parse_results[0]
+            elif len(parse_results) > 0:
+                return parse_results
+            elif fail is not None:
+                raise Failed(f"TVDb Error: {fail} not found from TVDb URL: {self.tvdb_url}")
+            else:
+                return None
+
+        self.title = parse_page("//div[@class='change_translation_text' and not(@style='display:none')]/@data-title", fail="Name")
+        self.poster_path = parse_page("//div[@class='row hidden-xs hidden-sm']/div/img/@src")
+        self.background_path = parse_page("(//h2[@class='mt-4' and text()='Backgrounds']/following::div/a/@href)[1]")
+        self.summary = parse_page("//div[@class='change_translation_text' and not(@style='display:none')]/p/text()[normalize-space()]")
+        if self.is_movie:
+            self.directors = parse_page("//strong[text()='Directors']/parent::li/span/a/text()[normalize-space()]")
+            self.writers = parse_page("//strong[text()='Writers']/parent::li/span/a/text()[normalize-space()]")
+            self.studios = parse_page("//strong[text()='Studio']/parent::li/span/a/text()[normalize-space()]")
         else:
-            raise Failed(f"TVDb Error: Name not found from TVDb URL: {tvdb_url}")
-
-        results = response.xpath("//div[@class='row hidden-xs hidden-sm']/div/img/@src")
-        self.poster_path = results[0] if len(results) > 0 and len(results[0]) > 0 else None
-
-        results = response.xpath("(//h2[@class='mt-4' and text()='Backgrounds']/following::div/a/@href)[1]")
-        self.background_path = results[0] if len(results) > 0 and len(results[0]) > 0 else None
-
-        results = response.xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()")
-        self.summary = results[0] if len(results) > 0 and len(results[0]) > 0 else None
+            self.networks = parse_page("//strong[text()='Networks']/parent::li/span/a/text()[normalize-space()]")
+        self.genres = parse_page("//strong[text()='Genres']/parent::li/span/a/text()[normalize-space()]")
 
         tmdb_id = None
-        if is_movie:
+        imdb_id = None
+        if self.is_movie:
             results = response.xpath("//*[text()='TheMovieDB.com']/@href")
             if len(results) > 0:
                 try:
                     tmdb_id = util.regex_first_int(results[0], "TMDb ID")
                 except Failed:
                     pass
-            if tmdb_id is None:
-                results = response.xpath("//*[text()='IMDB']/@href")
-                if len(results) > 0:
-                    try:
-                        tmdb_id = TVDb.config.Convert.imdb_to_tmdb(util.get_id_from_imdb_url(results[0]), fail=True)
-                    except Failed:
-                        pass
-            if tmdb_id is None:
-                raise Failed(f"TVDB Error: No TMDb ID found for {self.title}")
+            results = response.xpath("//*[text()='IMDB']/@href")
+            if len(results) > 0:
+                try:
+                    imdb_id = util.get_id_from_imdb_url(results[0])
+                except Failed:
+                    pass
+            if tmdb_id is None and imdb_id is None:
+                raise Failed(f"TVDB Error: No TMDb ID or IMDb ID found for {self.title}")
         self.tmdb_id = tmdb_id
-        self.tvdb_url = tvdb_url
-        self.language = language
-        self.is_movie = is_movie
-        self.TVDb = TVDb
+        self.imdb_id = imdb_id
 
 class TVDb:
     def __init__(self, config):
         self.config = config
-        self.site_url = "https://www.thetvdb.com"
-        self.alt_site_url = "https://thetvdb.com"
-        self.list_url = f"{self.site_url}/lists/"
-        self.alt_list_url = f"{self.alt_site_url}/lists/"
-        self.series_url = f"{self.site_url}/series/"
-        self.alt_series_url = f"{self.alt_site_url}/series/"
-        self.movies_url = f"{self.site_url}/movies/"
-        self.alt_movies_url = f"{self.alt_site_url}/movies/"
-        self.series_id_url = f"{self.site_url}/dereferrer/series/"
-        self.movie_id_url = f"{self.site_url}/dereferrer/movie/"
 
-    def get_movie_or_series(self, language, tvdb_url, is_movie):
+    def get_item(self, language, tvdb_url, is_movie):
         return self.get_movie(language, tvdb_url) if is_movie else self.get_series(language, tvdb_url)
 
     def get_series(self, language, tvdb_url):
         try:
-            tvdb_url = f"{self.series_id_url}{int(tvdb_url)}"
+            tvdb_url = f"{urls['series_id']}{int(tvdb_url)}"
         except ValueError:
             pass
-        return TVDbObj(tvdb_url, language, False, self)
+        return TVDbObj(tvdb_url, language, False, self.config)
 
     def get_movie(self, language, tvdb_url):
         try:
-            tvdb_url = f"{self.movie_id_url}{int(tvdb_url)}"
+            tvdb_url = f"{urls['movie_id']}{int(tvdb_url)}"
         except ValueError:
             pass
-        return TVDbObj(tvdb_url, language, True, self)
+        return TVDbObj(tvdb_url, language, True, self.config)
 
     def get_list_description(self, tvdb_url, language):
-        description = self._request(tvdb_url, language).xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()")
+        response = self.config.get_html(tvdb_url, headers=util.header(language))
+        description = response.xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()")
         return description[0] if len(description) > 0 and len(description[0]) > 0 else ""
 
     def _ids_from_url(self, tvdb_url, language):
-        show_ids = []
-        movie_ids = []
+        ids = []
         tvdb_url = tvdb_url.strip()
-        if tvdb_url.startswith((self.list_url, self.alt_list_url)):
+        if tvdb_url.startswith((urls["list"], urls["alt_list"])):
            try:
-                items = self._request(tvdb_url, language).xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']")
+                response = self.config.get_html(tvdb_url, headers=util.header(language))
+                items = response.xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']")
                 for item in items:
                     title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0]
                     item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0]
                     if item_url.startswith("/series/"):
                        try:
-                            show_ids.append(self.get_series(language, f"{self.site_url}{item_url}").id)
+                            ids.append((self.get_series(language, f"{base_url}{item_url}").id, "tvdb"))
                         except Failed as e:
                             logger.error(f"{e} for series {title}")
                     elif item_url.startswith("/movies/"):
                         try:
-                            tmdb_id = self.get_movie(language, f"{self.site_url}{item_url}").tmdb_id
-                            if tmdb_id:
-                                movie_ids.append(tmdb_id)
-                            else:
-                                raise Failed(f"TVDb Error: TMDb ID not found from TVDb URL: {tvdb_url}")
+                            movie = self.get_movie(language, f"{base_url}{item_url}")
+                            if movie.tmdb_id:
+                                ids.append((movie.tmdb_id, "tmdb"))
+                            elif movie.imdb_id:
+                                ids.append((movie.imdb_id, "imdb"))
                         except Failed as e:
-                            logger.error(f"{e} for series {title}")
+                            logger.error(e)
                     else:
                         logger.error(f"TVDb Error: Skipping Movie: {title}")
-                if len(show_ids) > 0 or len(movie_ids) > 0:
-                    return movie_ids, show_ids
+                    time.sleep(2)
+                if len(ids) > 0:
+                    return ids
                 raise Failed(f"TVDb Error: No TVDb IDs found at {tvdb_url}")
             except requests.exceptions.MissingSchema:
                 util.print_stacktrace()
                 raise Failed(f"TVDb Error: URL Lookup Failed for {tvdb_url}")
        else:
-            raise Failed(f"TVDb Error: {tvdb_url} must begin with {self.list_url}")
+            raise Failed(f"TVDb Error: {tvdb_url} must begin with {urls['list']}")
 
-    @retry(stop_max_attempt_number=6, wait_fixed=10000)
-    def _request(self, url, language):
-        return html.fromstring(requests.get(url, headers={"Accept-Language": language}).content)
-
-    def get_items(self, method, data, language):
-        pretty = util.pretty_names[method] if method in util.pretty_names else method
-        show_ids = []
-        movie_ids = []
-        logger.info(f"Processing {pretty}: {data}")
+    def get_tvdb_ids(self, method, data, language):
         if method == "tvdb_show":
-            show_ids.append(self.get_series(language, data).id)
+            logger.info(f"Processing TVDb Show: {data}")
+            return [(self.get_series(language, data).id, "tvdb")]
         elif method == "tvdb_movie":
-            movie_ids.append(self.get_movie(language, data).tmdb_id)
+            logger.info(f"Processing TVDb Movie: {data}")
+            movie = self.get_movie(language, data)
+            if movie.tmdb_id:
+                return [(movie.tmdb_id, "tmdb")]
+            elif movie.imdb_id:
+                return [(movie.imdb_id, "imdb")]
         elif method == "tvdb_list":
-            tmdb_ids, tvdb_ids = self._ids_from_url(data, language)
-            movie_ids.extend(tmdb_ids)
-            show_ids.extend(tvdb_ids)
+            logger.info(f"Processing TVDb List: {data}")
+            return self._ids_from_url(data, language)
         else:
             raise Failed(f"TVDb Error: Method {method} not supported")
-        logger.debug("")
-        logger.debug(f"{len(movie_ids)} TMDb IDs Found: {movie_ids}")
-        logger.debug(f"{len(show_ids)} TVDb IDs Found: {show_ids}")
-        return movie_ids, show_ids
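Two details worth noting in the TVDb rewrite: numeric inputs are normalized into "dereferrer" URLs, which TVDb redirects to the canonical slug page, and the new parse_page closure replaces the repeated xpath-then-length-check pattern. The dereferrer handling in isolation (a sketch):

def to_series_url(value):
    try:
        return f"https://www.thetvdb.com/dereferrer/series/{int(value)}"  # bare TVDb ID
    except ValueError:
        return value.strip()  # already a full TVDb URL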
358
modules/util.py
358
modules/util.py
|
@ -1,5 +1,6 @@
|
|||
import logging, os, re, signal, sys, time, traceback
|
||||
from datetime import datetime
|
||||
import glob, logging, os, re, signal, sys, time, traceback
|
||||
from datetime import datetime, timedelta
|
||||
from logging.handlers import RotatingFileHandler
|
||||
from pathvalidate import is_valid_filename, sanitize_filename
|
||||
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
|
||||
|
||||
|

@@ -47,184 +48,30 @@ days_alias = {
"saturday": 5, "sat": 5, "s": 5,
"sunday": 6, "sun": 6, "su": 6, "u": 6
}
pretty_days = {
0: "Monday",
1: "Tuesday",
2: "Wednesday",
3: "Thursday",
4: "Friday",
5: "Saturday",
6: "Sunday"
mod_displays = {
"": "is", ".not": "is not", ".begins": "begins with", ".ends": "ends with", ".before": "is before", ".after": "is after",
".gt": "is greater than", ".gte": "is greater than or equal", ".lt": "is less than", ".lte": "is less than or equal"
}
pretty_days = {0: "Monday", 1: "Tuesday", 2: "Wednesday", 3: "Thursday", 4: "Friday", 5: "Saturday", 6: "Sunday"}
pretty_months = {
1: "January",
2: "February",
3: "March",
4: "April",
5: "May",
6: "June",
7: "July",
8: "August",
9: "September",
10: "October",
11: "November",
12: "December"
}
pretty_seasons = {
"winter": "Winter",
"spring": "Spring",
"summer": "Summer",
"fall": "Fall"
}
pretty_names = {
"anidb_id": "AniDB ID",
"anidb_relation": "AniDB Relation",
"anidb_popular": "AniDB Popular",
"anilist_genre": "AniList Genre",
"anilist_id": "AniList ID",
"anilist_popular": "AniList Popular",
"anilist_relations": "AniList Relations",
"anilist_season": "AniList Season",
"anilist_studio": "AniList Studio",
"anilist_tag": "AniList Tag",
"anilist_top_rated": "AniList Top Rated",
"icheckmovies_list": "I Check Movies List",
"imdb_list": "IMDb List",
"imdb_id": "IMDb ID",
"letterboxd_list": "Letterboxd List",
"letterboxd_list_details": "Letterboxd List",
"mal_id": "MyAnimeList ID",
"mal_all": "MyAnimeList All",
"mal_airing": "MyAnimeList Airing",
"mal_upcoming": "MyAnimeList Upcoming",
"mal_tv": "MyAnimeList TV",
"mal_ova": "MyAnimeList OVA",
"mal_movie": "MyAnimeList Movie",
"mal_special": "MyAnimeList Special",
"mal_popular": "MyAnimeList Popular",
"mal_favorite": "MyAnimeList Favorite",
"mal_season": "MyAnimeList Season",
"mal_suggested": "MyAnimeList Suggested",
"mal_userlist": "MyAnimeList Userlist",
"plex_all": "Plex All",
"plex_collection": "Plex Collection",
"plex_search": "Plex Search",
"tautulli_popular": "Tautulli Popular",
"tautulli_watched": "Tautulli Watched",
"tmdb_actor": "TMDb Actor",
"tmdb_actor_details": "TMDb Actor",
"tmdb_collection": "TMDb Collection",
"tmdb_collection_details": "TMDb Collection",
"tmdb_company": "TMDb Company",
"tmdb_crew": "TMDb Crew",
"tmdb_crew_details": "TMDb Crew",
"tmdb_director": "TMDb Director",
"tmdb_director_details": "TMDb Director",
"tmdb_discover": "TMDb Discover",
"tmdb_keyword": "TMDb Keyword",
"tmdb_list": "TMDb List",
"tmdb_list_details": "TMDb List",
"tmdb_movie": "TMDb Movie",
"tmdb_movie_details": "TMDb Movie",
"tmdb_network": "TMDb Network",
"tmdb_now_playing": "TMDb Now Playing",
"tmdb_person": "TMDb Person",
"tmdb_popular": "TMDb Popular",
"tmdb_producer": "TMDb Producer",
"tmdb_producer_details": "TMDb Producer",
"tmdb_show": "TMDb Show",
"tmdb_show_details": "TMDb Show",
"tmdb_top_rated": "TMDb Top Rated",
"tmdb_trending_daily": "TMDb Trending Daily",
"tmdb_trending_weekly": "TMDb Trending Weekly",
"tmdb_writer": "TMDb Writer",
"tmdb_writer_details": "TMDb Writer",
"trakt_collected": "Trakt Collected",
"trakt_collection": "Trakt Collection",
"trakt_list": "Trakt List",
"trakt_list_details": "Trakt List",
"trakt_popular": "Trakt Popular",
"trakt_recommended": "Trakt Recommended",
"trakt_trending": "Trakt Trending",
"trakt_watched": "Trakt Watched",
"trakt_watchlist": "Trakt Watchlist",
"tvdb_list": "TVDb List",
"tvdb_list_details": "TVDb List",
"tvdb_movie": "TVDb Movie",
"tvdb_movie_details": "TVDb Movie",
"tvdb_show": "TVDb Show",
"tvdb_show_details": "TVDb Show"
}
pretty_ids = {
"anidbid": "AniDB",
"imdbid": "IMDb",
"mal_id": "MyAnimeList",
"themoviedb_id": "TMDb",
"thetvdb_id": "TVDb",
"tvdbid": "TVDb"
1: "January", 2: "February", 3: "March", 4: "April", 5: "May", 6: "June",
7: "July", 8: "August", 9: "September", 10: "October", 11: "November", 12: "December"
}
seasons = ["winter", "spring", "summer", "fall"]
pretty_ids = {"anidbid": "AniDB", "imdbid": "IMDb", "mal_id": "MyAnimeList", "themoviedb_id": "TMDb", "thetvdb_id": "TVDb", "tvdbid": "TVDb"}

def tab_new_lines(data):
return str(data).replace("\n", "\n|\t ") if "\n" in str(data) else str(data)

def make_ordinal(n):
n = int(n)
suffix = ["th", "st", "nd", "rd", "th"][min(n % 10, 4)]
if 11 <= (n % 100) <= 13:
suffix = "th"
return str(n) + suffix
return f"{n}{'th' if 11 <= (n % 100) <= 13 else ['th', 'st', 'nd', 'rd', 'th'][min(n % 10, 4)]}"
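
A quick sanity check of the condensed make_ordinal above; the sample values are illustrative only:

def make_ordinal(n):
    n = int(n)
    return f"{n}{'th' if 11 <= (n % 100) <= 13 else ['th', 'st', 'nd', 'rd', 'th'][min(n % 10, 4)]}"

# 11-13 take "th" even though they end in 1, 2, 3.
assert [make_ordinal(i) for i in (1, 2, 3, 4, 11, 12, 21, 112)] == \
       ["1st", "2nd", "3rd", "4th", "11th", "12th", "21st", "112th"]
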
def choose_from_list(datalist, description, data=None, list_type="title", exact=False):
if len(datalist) > 0:
if len(datalist) == 1 and (description != "collection" or datalist[0].title == data):
return datalist[0]
zero_option = f"Create New Collection: {data}" if description == "collection" else "Do Nothing"
message = f"Multiple {description}s Found\n0) {zero_option}"
for i, d in enumerate(datalist, 1):
if list_type == "title":
if d.title == data:
return d
message += f"\n{i}) {d.title}"
else:
message += f"\n{i}) [{d[0]}] {d[1]}"
if exact:
return None
print_multiline(message, info=True)
while True:
try:
selection = int(logger_input(f"Choose {description} number")) - 1
if selection >= 0: return datalist[selection]
elif selection == -1: return None
else: logger.info(f"Invalid {description} number")
except IndexError: logger.info(f"Invalid {description} number")
except TimeoutExpired:
if list_type == "title":
logger.warning(f"Input Timeout: using {data}")
return None
else:
logger.warning(f"Input Timeout: using {datalist[0][1]}")
return datalist[0]
else:
return None

def get_bool(method_name, method_data):
if isinstance(method_data, bool):
return method_data
elif str(method_data).lower() in ["t", "true"]:
return True
elif str(method_data).lower() in ["f", "false"]:
return False
else:
raise Failed(f"Collection Error: {method_name} attribute: {method_data} invalid must be either true or false")

def compile_list(data):
if isinstance(data, list):
text = ""
for item in data:
text += f"{',' if len(text) > 0 else ''}{item}"
return text
else:
return data
def add_dict_list(keys, value, dict_map):
for key in keys:
if key in dict_map:
dict_map[key].append(value)
else:
dict_map[key] = [value]

def get_list(data, lower=False, split=True, int_list=False):
if data is None: return None

@@ -236,50 +83,25 @@ def get_list(data, lower=False, split=True, int_list=False):
else: return [d.strip() for d in str(data).split(",")]

def get_int_list(data, id_type):
values = get_list(data)
int_values = []
for value in values:
for value in get_list(data):
try: int_values.append(regex_first_int(value, id_type))
except Failed as e: logger.error(e)
return int_values

def get_year_list(data, current_year, method):
final_years = []
values = get_list(data)
for value in values:
final_years.append(check_year(value, current_year, method))
return final_years

def check_year(year, current_year, method):
return check_number(year, method, minimum=1800, maximum=current_year)

def check_number(value, method, number_type="int", minimum=None, maximum=None):
if number_type == "int":
try: num_value = int(str(value))
except ValueError: raise Failed(f"Collection Error: {method}: {value} must be an integer")
elif number_type == "float":
try: num_value = float(str(value))
except ValueError: raise Failed(f"Collection Error: {method}: {value} must be a number")
else: raise Failed(f"Number Type: {number_type} invalid")
if minimum is not None and maximum is not None and (num_value < minimum or num_value > maximum):
raise Failed(f"Collection Error: {method}: {num_value} must be between {minimum} and {maximum}")
elif minimum is not None and num_value < minimum:
raise Failed(f"Collection Error: {method}: {num_value} is less then {minimum}")
elif maximum is not None and num_value > maximum:
raise Failed(f"Collection Error: {method}: {num_value} is greater then {maximum}")
else:
return num_value

def check_date(date_text, method, return_string=False, plex_date=False):
try: date_obg = datetime.strptime(str(date_text), "%Y-%m-%d" if plex_date else "%m/%d/%Y")
except ValueError: raise Failed(f"Collection Error: {method}: {date_text} must match pattern {'YYYY-MM-DD e.g. 2020-12-25' if plex_date else 'MM/DD/YYYY e.g. 12/25/2020'}")
return str(date_text) if return_string else date_obg
def validate_date(date_text, method, return_as=None):
try: date_obg = datetime.strptime(str(date_text), "%Y-%m-%d" if "-" in str(date_text) else "%m/%d/%Y")
except ValueError: raise Failed(f"Collection Error: {method}: {date_text} must match pattern YYYY-MM-DD (e.g. 2020-12-25) or MM/DD/YYYY (e.g. 12/25/2020)")
return datetime.strftime(date_obg, return_as) if return_as else date_obg
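
A hedged usage sketch of the merged validate_date above, with the Failed wrapper dropped for brevity; return_as takes standard strftime codes:

from datetime import datetime

def validate_date(date_text, method, return_as=None):
    # Same parsing logic as above, minus the Failed error wrapper.
    date_obj = datetime.strptime(str(date_text), "%Y-%m-%d" if "-" in str(date_text) else "%m/%d/%Y")
    return datetime.strftime(date_obj, return_as) if return_as else date_obj

print(validate_date("2020-12-25", "added"))                        # 2020-12-25 00:00:00
print(validate_date("12/25/2020", "added", return_as="%Y-%m-%d"))  # 2020-12-25
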
def logger_input(prompt, timeout=60):
if windows: return windows_input(prompt, timeout)
elif hasattr(signal, "SIGALRM"): return unix_input(prompt, timeout)
else: raise SystemError("Input Timeout not supported on this system")

def header(language="en-US,en;q=0.5"):
return {"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}

def alarm_handler(signum, frame):
raise TimeoutExpired

@@ -291,22 +113,6 @@ def unix_input(prompt, timeout=60):
except EOFError: raise Failed("Input Failed")
finally: signal.alarm(0)

def old_windows_input(prompt, timeout=60, timer=time.monotonic):
prompt = f"| {prompt}: "
sys.stdout.write(prompt)
sys.stdout.flush()
endtime = timer() + timeout
result = []
while timer() < endtime:
if msvcrt.kbhit():
result.append(msvcrt.getwche())
if result[-1] == "\n":
out = "".join(result[:-1])
logger.debug(f"{prompt[2:]}{out}")
return out
time.sleep(0.04)
raise TimeoutExpired

def windows_input(prompt, timeout=5):
sys.stdout.write(f"| {prompt}: ")
sys.stdout.flush()

@@ -393,7 +199,7 @@ def separator(text=None, space=True, border=True, debug=False):

def apply_formatter(handler, border=True):
text = f"| %(message)-{screen_width - 2}s |" if border else f"%(message)-{screen_width - 2}s"
if isinstance(handler, logging.handlers.RotatingFileHandler):
if isinstance(handler, RotatingFileHandler):
text = f"[%(asctime)s] %(filename)-27s %(levelname)-10s {text}"
handler.setFormatter(logging.Formatter(text))
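
For intuition, a minimal sketch of the bordered format apply_formatter builds, with an assumed screen_width of 100 (the real value comes from the --width argument):

import logging

screen_width = 100  # assumption for this sketch
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(f"| %(message)-{screen_width - 2}s |"))

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning("hello")  # prints: | hello<padded out to the border>|
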

@@ -429,9 +235,117 @@ def is_locked(filepath):
file_object = open(filepath, 'a', 8)
if file_object:
locked = False
except IOError as message:
except IOError:
locked = True
finally:
if file_object:
file_object.close()
return locked

def glob_filter(filter_in):
filter_in = filter_in.translate({ord("["): "[[]", ord("]"): "[]]"}) if "[" in filter_in else filter_in
return glob.glob(filter_in)

def is_date_filter(value, modifier, data, final, current_time):
if value is None:
return True
if modifier in ["", ".not"]:
threshold_date = current_time - timedelta(days=data)
if (modifier == "" and (value is None or value < threshold_date)) \
or (modifier == ".not" and value and value >= threshold_date):
return True
elif modifier in [".before", ".after"]:
filter_date = validate_date(data, final)
if (modifier == ".before" and value >= filter_date) or (modifier == ".after" and value <= filter_date):
return True
elif modifier == ".regex":
jailbreak = False
for check_data in data:
if re.compile(check_data).match(value.strftime("%m/%d/%Y")):
jailbreak = True
break
if not jailbreak:
return True
return False

def is_number_filter(value, modifier, data):
return value is None or (modifier == ".gt" and value <= data) \
or (modifier == ".gte" and value < data) \
or (modifier == ".lt" and value >= data) \
or (modifier == ".lte" and value > data)

def is_string_filter(values, modifier, data):
jailbreak = False
for value in values:
for check_value in data:
if (modifier in ["", ".not"] and check_value.lower() in value.lower()) \
or (modifier == ".begins" and value.lower().startswith(check_value.lower())) \
or (modifier == ".ends" and value.lower().endswith(check_value.lower())) \
or (modifier == ".regex" and re.compile(check_value).match(value)):
jailbreak = True
break
if jailbreak: break
return (jailbreak and modifier == ".not") or (not jailbreak and modifier in ["", ".begins", ".ends", ".regex"])
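
These filters share an exclusion convention: returning True means the item is filtered out. A standalone check against is_number_filter, with illustrative values:

def is_number_filter(value, modifier, data):
    # Copy of the function above: None or a failing comparison excludes the item.
    return value is None or (modifier == ".gt" and value <= data) \
           or (modifier == ".gte" and value < data) \
           or (modifier == ".lt" and value >= data) \
           or (modifier == ".lte" and value > data)

assert is_number_filter(7.1, ".gte", 8.0) is True    # 7.1 < 8.0 -> excluded
assert is_number_filter(8.4, ".gte", 8.0) is False   # passes the filter
assert is_number_filter(None, ".lt", 5) is True      # missing data -> excluded
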
def parse(attribute, data, datatype=None, methods=None, parent=None, default=None, options=None, translation=None, minimum=1, maximum=None, regex=None):
display = f"{parent + ' ' if parent else ''}{attribute} attribute"
if options is None and translation is not None:
options = [o for o in translation]
value = data[methods[attribute]] if methods and attribute in methods else data

if datatype == "list":
if methods and attribute in methods and data[methods[attribute]]:
return [v for v in value if v] if isinstance(value, list) else [str(value)]
return []
elif datatype == "dictlist":
final_list = []
for dict_data in get_list(value):
if isinstance(dict_data, dict):
final_list.append((dict_data, {dm.lower(): dm for dm in dict_data}))
else:
raise Failed(f"Collection Error: {display} {dict_data} is not a dictionary")
return final_list
elif methods and attribute not in methods:
message = f"{display} not found"
elif value is None:
message = f"{display} is blank"
elif regex is not None:
regex_str, example = regex
if re.compile(regex_str).match(str(value)):
return str(value)
else:
message = f"{display}: {value} must match pattern {regex_str} e.g. {example}"
elif datatype == "bool":
if isinstance(value, bool):
return value
elif isinstance(value, int):
return value > 0
elif str(value).lower() in ["t", "true"]:
return True
elif str(value).lower() in ["f", "false"]:
return False
else:
message = f"{display} must be either true or false"
elif datatype in ["int", "float"]:
try:
value = int(str(value)) if datatype == "int" else float(str(value))
if (maximum is None and minimum <= value) or (maximum is not None and minimum <= value <= maximum):
return value
except ValueError:
pass
pre = f"{display} {value} must {'an integer' if datatype == 'int' else 'a number'}"
if maximum is None:
message = f"{pre} {minimum} or greater"
else:
message = f"{pre} between {minimum} and {maximum}"
elif (translation is not None and str(value).lower() not in translation) or \
(options is not None and translation is None and str(value).lower() not in options):
message = f"{display} {value} must be in {', '.join([str(o) for o in options])}"
else:
return translation[value] if translation is not None else value

if default is None:
raise Failed(f"Collection Error: {message}")
else:
logger.warning(f"Collection Warning: {message} using {default} as default")
return translation[default] if translation is not None else default
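
A hedged sketch of how a collection attribute might flow through parse above; the attribute name and data are invented for illustration:

methods = {"sync_mode": "sync_mode"}   # lower-cased key -> original key
data = {"sync_mode": "bad value"}

# "bad value" is not in options, but because default is given, parse logs a
# Collection Warning and returns "append" instead of raising Failed.
value = parse("sync_mode", data, methods=methods,
              default="append", options=["append", "sync"])
assert value == "append"
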

plex_meta_manager.py

@@ -1,5 +1,6 @@
import argparse, logging, os, re, sys, time
from datetime import datetime
from logging.handlers import RotatingFileHandler
try:
import schedule
from modules import util

@@ -26,6 +27,7 @@ parser.add_argument("-lo", "--library-only", "--libraries-only", dest="library_o
parser.add_argument("-rc", "-cl", "--collection", "--collections", "--run-collection", "--run-collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-rl", "-l", "--library", "--libraries", "--run-library", "--run-libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-nc", "--no-countdown", dest="no_countdown", help="Run without displaying the countdown", action="store_true", default=False)
parser.add_argument("-nm", "--no-missing", dest="no_missing", help="Run without running the missing section", action="store_true", default=False)
parser.add_argument("-d", "--divider", dest="divider", help="Character that divides the sections (Default: '=')", default="=", type=str)
parser.add_argument("-w", "--width", dest="width", help="Screen Width (Default: 100)", default=100, type=int)
args = parser.parse_args()
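
A hedged sketch of the new -nm/--no-missing flag in isolation; the argument definition matches the line above:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-nm", "--no-missing", dest="no_missing",
                    help="Run without running the missing section",
                    action="store_true", default=False)

assert parser.parse_args(["--no-missing"]).no_missing is True
assert parser.parse_args([]).no_missing is False  # defaults to False when absent
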

@@ -46,6 +48,7 @@ test = check_bool("PMM_TEST", args.test)
debug = check_bool("PMM_DEBUG", args.debug)
run = check_bool("PMM_RUN", args.run)
no_countdown = check_bool("PMM_NO_COUNTDOWN", args.no_countdown)
no_missing = check_bool("PMM_NO_MISSING", args.no_missing)
library_only = check_bool("PMM_LIBRARIES_ONLY", args.library_only)
collection_only = check_bool("PMM_COLLECTIONS_ONLY", args.collection_only)
collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIONS") else args.collections
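
check_bool is defined earlier in plex_meta_manager.py; a minimal sketch of the assumed pattern, where a set environment variable wins over the command-line value:

import os

def check_bool(env_str, default):
    # Assumed shape for illustration, not the exact PMM implementation.
    env_value = os.environ.get(env_str)
    if env_value is None:
        return default
    return env_value.lower() in ["t", "true", "1"]

os.environ["PMM_NO_MISSING"] = "true"
print(check_bool("PMM_NO_MISSING", False))  # True, regardless of the CLI flag
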

@@ -91,7 +94,7 @@ sys.excepthook = util.my_except_hook
def start(config_path, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
file_logger = os.path.join(default_dir, "logs", "meta.log")
should_roll_over = os.path.isfile(file_logger)
file_handler = logging.handlers.RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
file_handler = RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
util.apply_formatter(file_handler)
file_handler.addFilter(fmt_filter)
if should_roll_over:

@@ -105,7 +108,7 @@ def start(config_path, is_test=False, time_scheduled=None, requested_collections
logger.info(util.centered("| __/| | __/> < | | | | __/ || (_| | | | | | (_| | | | | (_| | (_| | __/ | "))
logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
logger.info(util.centered(" |___/ "))
logger.info(util.centered(" Version: 1.11.3 "))
logger.info(util.centered(" Version: 1.12.0 "))
if time_scheduled: start_type = f"{time_scheduled} "
elif is_test: start_type = "Test "
elif requested_collections: start_type = "Collections "

@@ -132,7 +135,7 @@ def update_libraries(config):
os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True)
col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log")
should_roll_over = os.path.isfile(col_file_logger)
library_handler = logging.handlers.RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
library_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
util.apply_formatter(library_handler)
if should_roll_over:
library_handler.doRollover()

@@ -144,15 +147,15 @@ def update_libraries(config):
logger.info("")
util.separator(f"Mapping {library.name} Library", space=False, border=False)
logger.info("")
library.map_guids()
items = library.map_guids()
if not config.test_mode and not config.resume_from and not collection_only and library.mass_update:
mass_metadata(config, library)
mass_metadata(config, library, items)
for metadata in library.metadata_files:
logger.info("")
util.separator(f"Running Metadata File\n{metadata.path}")
if not config.test_mode and not config.resume_from and not collection_only:
try:
metadata.update_metadata(config.TMDb, config.test_mode)
metadata.update_metadata()
except Failed as e:
logger.error(e)
collections_to_run = metadata.get_collections(config.requested_collections)
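
The change to map_guids follows a fetch-once pattern: the mapped item list is returned and handed to mass_metadata instead of being fetched again. A standalone sketch with a stand-in library object (an assumption, not PMM's API):

class FakeLibrary:
    def get_all(self):
        return ["Movie A", "Movie B"]  # stands in for Plex items

def map_guids(library):
    items = library.get_all()          # single Plex fetch
    return items                       # callers reuse this list

items = map_guids(FakeLibrary())
print(len(items))                      # 2 -- no second get_all() needed
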

@@ -166,6 +169,15 @@ def update_libraries(config):
logger.removeHandler(library_handler)
run_collection(config, library, metadata, collections_to_run)
logger.addHandler(library_handler)
if library.run_sort:
logger.info("")
util.separator(f"Sorting {library.name} Library's Collections", space=False, border=False)
logger.info("")
for builder in library.run_sort:
logger.info("")
util.separator(f"Sorting {builder.name} Collection", space=False, border=False)
logger.info("")
builder.sort_collection()

if not config.test_mode and not config.requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
logger.info("")

@@ -189,10 +201,10 @@ def update_libraries(config):
util.separator(f"All {'Movies' if library.is_movie else 'Shows'} Assets Check for {library.name} Library", space=False, border=False)
logger.info("")
for col in unmanaged_collections:
poster, background = library.find_collection_assets(col)
poster, background = library.find_collection_assets(col, create=library.create_asset_folders)
library.upload_images(col, poster=poster, background=background)
for item in library.get_all():
library.update_item_from_assets(item)
library.update_item_from_assets(item, create=library.create_asset_folders)

logger.removeHandler(library_handler)

@@ -214,7 +226,7 @@ def update_libraries(config):
for library in config.libraries:
if library.run_again:
col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log")
library_handler = logging.handlers.RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8")
library_handler = RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8")
util.apply_formatter(library_handler)
logger.addHandler(library_handler)
library_handler.addFilter(fmt_filter)

@@ -245,7 +257,7 @@ def update_libraries(config):
if library.optimize:
library.query(library.PlexServer.library.optimize)

def mass_metadata(config, library):
def mass_metadata(config, library, items):
logger.info("")
util.separator(f"Mass Editing {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
logger.info("")

@@ -256,24 +268,45 @@ def mass_metadata(config, library):
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Splitting"))
radarr_adds = []
sonarr_adds = []
items = library.get_all()
trakt_ratings = config.Trakt.user_ratings(library.is_movie) if library.mass_trakt_rating_update else []

for i, item in enumerate(items, 1):
library.reload(item)
try:
library.reload(item)
except Failed as e:
logger.error(e)
continue
util.print_return(f"Processing: {i}/{len(items)} {item.title}")
tmdb_id = None
tvdb_id = None
imdb_id = None
if config.Cache:
t_id, guid_media_type, _ = config.Cache.query_guid_map(item.guid)
t_id, i_id, guid_media_type, _ = config.Cache.query_guid_map(item.guid)
if t_id:
if "movie" in guid_media_type:
tmdb_id = t_id
tmdb_id = t_id[0]
else:
tvdb_id = t_id
tvdb_id = t_id[0]
if i_id:
imdb_id = i_id[0]
if not tmdb_id and not tvdb_id:
tmdb_id = library.get_tmdb_from_map(item)
if not tmdb_id and not tvdb_id and library.is_show:
tmdb_id = library.get_tvdb_from_map(item)
tvdb_id = library.get_tvdb_from_map(item)

if library.mass_trakt_rating_update:
try:
if library.is_movie and tmdb_id in trakt_ratings:
new_rating = trakt_ratings[tmdb_id]
elif library.is_show and tvdb_id in trakt_ratings:
new_rating = trakt_ratings[tvdb_id]
else:
raise Failed
if str(item.userRating) != str(new_rating):
library.edit_query(item, {"userRating.value": new_rating, "userRating.locked": 1})
logger.info(util.adjust_space(f"{item.title[:25]:<25} | User Rating | {new_rating}"))
except Failed:
pass

if library.Radarr and library.radarr_add_all and tmdb_id:
radarr_adds.append(tmdb_id)
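
A hedged sketch of the Trakt rating lookup above, with a made-up dict standing in for config.Trakt.user_ratings:

trakt_ratings = {603: 9, 550: 10}        # illustrative data only
tmdb_id, current_rating = 603, "8.0"

new_rating = trakt_ratings.get(tmdb_id)
if new_rating is not None and str(current_rating) != str(new_rating):
    print(f"User Rating | {new_rating}")  # PMM calls library.edit_query here
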

@@ -288,7 +321,7 @@ def mass_metadata(config, library):
try:
tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
except Failed as e:
logger.info(util.adjust_space(str(e)))
logger.error(util.adjust_space(str(e)))
else:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}"))

@@ -303,14 +336,24 @@ def mass_metadata(config, library):
try:
omdb_item = config.OMDb.get_omdb(imdb_id)
except Failed as e:
logger.info(util.adjust_space(str(e)))
logger.error(util.adjust_space(str(e)))
except Exception:
logger.error(f"IMDb ID: {imdb_id}")
raise
else:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}"))

if not tmdb_item and not omdb_item:
tvdb_item = None
if library.mass_genre_update == "tvdb":
if tvdb_id:
try:
tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie)
except Failed as e:
logger.error(util.adjust_space(str(e)))
else:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}"))

if not tmdb_item and not omdb_item and not tvdb_item:
continue

if library.mass_genre_update:

@@ -319,31 +362,29 @@ def mass_metadata(config, library):
new_genres = [genre.name for genre in tmdb_item.genres]
elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]:
new_genres = omdb_item.genres
elif tvdb_item and library.mass_genre_update == "tvdb":
new_genres = tvdb_item.genres
else:
raise Failed
item_genres = [genre.tag for genre in item.genres]
display_str = ""
add_genre = []
for genre in (g for g in new_genres if g not in item_genres):
add_genre.append(genre)
display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
add_genre = [genre for genre in (g for g in new_genres if g not in item_genres)]
if len(add_genre) > 0:
display_str += f"+{', +'.join(add_genre)}"
library.query_data(item.addGenre, add_genre)
remove_genre = []
for genre in (g for g in item_genres if g not in new_genres):
remove_genre.append(genre)
display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}"
remove_genre = [genre for genre in (g for g in item_genres if g not in new_genres)]
if len(remove_genre) > 0:
display_str += f"-{', -'.join(remove_genre)}"
library.query_data(item.removeGenre, remove_genre)
if len(display_str) > 0:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Genres | {display_str}"))
except Failed:
pass
if library.mass_audience_rating_update or library.mass_critic_rating_update:
if library.mass_audience_rating_update:
try:
if tmdb_item and library.mass_genre_update == "tmdb":
if tmdb_item and library.mass_audience_rating_update == "tmdb":
new_rating = tmdb_item.vote_average
elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]:
elif omdb_item and library.mass_audience_rating_update in ["omdb", "imdb"]:
new_rating = omdb_item.imdb_rating
else:
raise Failed
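
The genre sync above is a set difference in both directions; a standalone sketch with invented genres:

new_genres = ["Action", "Comedy"]    # from TMDb/OMDb/TVDb
item_genres = ["Comedy", "Horror"]   # currently on the Plex item

add_genre = [g for g in new_genres if g not in item_genres]      # ['Action']
remove_genre = [g for g in item_genres if g not in new_genres]   # ['Horror']

display_str = ""
if add_genre:
    display_str += f"+{', +'.join(add_genre)}"
if remove_genre:
    display_str += f"-{', -'.join(remove_genre)}"
print(display_str)  # +Action-Horror
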

@@ -353,6 +394,19 @@ def mass_metadata(config, library):
if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating):
library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Audience Rating | {new_rating}"))
except Failed:
pass
if library.mass_critic_rating_update:
try:
if tmdb_item and library.mass_critic_rating_update == "tmdb":
new_rating = tmdb_item.vote_average
elif omdb_item and library.mass_critic_rating_update in ["omdb", "imdb"]:
new_rating = omdb_item.imdb_rating
else:
raise Failed
if new_rating is None:
logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found"))
else:
if library.mass_critic_rating_update and str(item.rating) != str(new_rating):
library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
logger.info(util.adjust_space(f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))

@@ -405,7 +459,7 @@ def run_collection(config, library, metadata, requested_collections):
os.makedirs(collection_log_folder, exist_ok=True)
col_file_logger = os.path.join(collection_log_folder, f"collection.log")
should_roll_over = os.path.isfile(col_file_logger)
collection_handler = logging.handlers.RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
collection_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
util.apply_formatter(collection_handler)
if should_roll_over:
collection_handler.doRollover()

@@ -420,7 +474,7 @@ def run_collection(config, library, metadata, requested_collections):

util.separator(f"Validating {mapping_name} Attributes", space=False, border=False)

builder = CollectionBuilder(config, library, metadata, mapping_name, collection_attrs)
builder = CollectionBuilder(config, library, metadata, mapping_name, no_missing, collection_attrs)
logger.info("")

util.separator(f"Building {mapping_name} Collection", space=False, border=False)

@@ -441,14 +495,14 @@ def run_collection(config, library, metadata, requested_collections):
for filter_key, filter_value in builder.filters:
logger.info(f"Collection Filter {filter_key}: {filter_value}")

builder.collect_rating_keys()
builder.find_rating_keys()

if len(builder.rating_keys) > 0 and builder.build_collection:
logger.info("")
util.separator(f"Adding to {mapping_name} Collection", space=False, border=False)
logger.info("")
builder.add_to_collection()
if len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0:
if builder.do_missing and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0):
if builder.details["show_missing"] is True:
logger.info("")
util.separator(f"Missing from Library", space=False, border=False)
|
@ -463,9 +517,13 @@ def run_collection(config, library, metadata, requested_collections):
|
|||
logger.info("")
|
||||
builder.update_details()
|
||||
|
||||
logger.info("")
|
||||
util.separator(f"Updating Details of the Items in {mapping_name} Collection", space=False, border=False)
|
||||
logger.info("")
|
||||
if builder.custom_sort:
|
||||
library.run_sort.append(builder)
|
||||
# logger.info("")
|
||||
# util.separator(f"Sorting {mapping_name} Collection", space=False, border=False)
|
||||
# logger.info("")
|
||||
# builder.sort_collection()
|
||||
|
||||
builder.update_item_details()
|
||||
|
||||
if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):
|
||||
|

@@ -504,7 +562,7 @@ try:
minutes = int((seconds % 3600) // 60)
time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} {times_to_run}")
util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}")
time.sleep(60)
except KeyboardInterrupt:
util.separator("Exiting Plex Meta Manager")
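
A quick check of the countdown arithmetic above for one concrete value:

seconds = 5400                         # 1.5 hours until the next run
hours = int(seconds // 3600)           # 1
minutes = int((seconds % 3600) // 60)  # 30
time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
print(time_str)                        # 1 Hour and 30 Minutes
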

requirements.txt

@@ -1,14 +1,10 @@
# Remove
# Less common, pinned
PlexAPI==4.6.1
tmdbv3api==1.7.5
trakt.py==4.3.0
arrapi==1.0.2
# More common, flexible
lxml
requests>=2.4.2
ruamel.yaml
schedule
retrying
pathvalidate
pillow
PlexAPI==4.7.0
tmdbv3api==1.7.6
arrapi==1.1.2
lxml==4.6.3
requests==2.26.0
ruamel.yaml==0.17.10
schedule==1.1.0
retrying==1.3.3
pathvalidate==2.4.1
pillow==8.3.1