Mirror of https://github.com/meisnate12/Plex-Meta-Manager (synced 2024-11-22 12:33:10 +00:00)

Commit 0cc9d81283 (parent 26eb57587f): add Folder option for metadata files and cleanup
6 changed files with 81 additions and 102 deletions
@@ -9,22 +9,33 @@ builders = [
     "anilist_season", "anilist_studio", "anilist_tag", "anilist_top_rated"
 ]
 pretty_names = {"score": "Average Score", "popular": "Popularity"}
+search_translation = {
+    "season": "MediaSeason", "seasonYear": "Int", "isAdult": "Boolean",
+    "startDate_greater": "FuzzyDateInt", "startDate_lesser": "FuzzyDateInt", "endDate_greater": "FuzzyDateInt", "endDate_lesser": "FuzzyDateInt",
+    "format_in": "[MediaFormat]", "format_not_in": "[MediaFormat]", "status_in": "[MediaStatus]", "status_not_in": "[MediaStatus]",
+    "episodes_greater": "Int", "episodes_lesser": "Int", "duration_greater": "Int", "duration_lesser": "Int",
+    "genre_in": "[String]", "genre_not_in": "[String]", "tag_in": "[String]", "tag_not_in": "[String]",
+    "averageScore_greater": "Int", "averageScore_lesser": "Int", "popularity_greater": "Int", "popularity_lesser": "Int"
+}
 base_url = "https://graphql.anilist.co"
-tag_query = "query{MediaTagCollection {name}}"
+tag_query = "query{MediaTagCollection {name, category}}"
 genre_query = "query{GenreCollection}"

 class AniList:
     def __init__(self, config):
         self.config = config
         self.tags = {}
         self.genres = {}
-        self.tags = {t["name"].lower(): t["name"] for t in self._request(tag_query, {})["data"]["MediaTagCollection"]}
-        self.genres = {g.lower(): g for g in self._request(genre_query, {})["data"]["GenreCollection"]}
+        self.categories = {}
+        for media_tag in self._request(tag_query, {})["data"]["MediaTagCollection"]:
+            self.tags[media_tag["name"].lower().replace(" ", "-")] = media_tag["name"]
+            self.categories[media_tag["category"].lower().replace(" ", "-")] = media_tag["category"]
+        self.genres = {g.lower().replace(" ", "-"): g for g in self._request(genre_query, {})["data"]["GenreCollection"]}

     def _request(self, query, variables):
         response = self.config.post(base_url, json={"query": query, "variables": variables})
         json_obj = response.json()
         if "errors" in json_obj:
             logger.debug(json_obj)
             if json_obj['errors'][0]['message'] == "Too Many Requests.":
                 if "Retry-After" in response.headers:
                     time.sleep(int(response.headers["Retry-After"]))
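Illustrative sketch (not part of the commit): the `_request` helper above POSTs a GraphQL document to https://graphql.anilist.co and backs off when AniList answers "Too Many Requests." with a Retry-After header. A minimal standalone version using the `requests` library directly (the project routes this through `config.post`; `anilist_request` and the retry count here are hypothetical):

    import time
    import requests

    BASE_URL = "https://graphql.anilist.co"

    def anilist_request(query, variables, retries=3):
        # POST the GraphQL query; AniList returns JSON with either "data" or "errors".
        for _ in range(retries):
            response = requests.post(BASE_URL, json={"query": query, "variables": variables})
            json_obj = response.json()
            if "errors" in json_obj and json_obj["errors"][0]["message"] == "Too Many Requests.":
                # Back off for as long as the API asks, then try again.
                time.sleep(int(response.headers.get("Retry-After", 10)))
                continue
            return json_obj
        raise RuntimeError("AniList request kept hitting the rate limit")

    # Example: fetch the tag list used to build the lookup tables above.
    tags = anilist_request("query{MediaTagCollection {name, category}}", {})
    print(tags["data"]["MediaTagCollection"][0])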
@@ -35,7 +46,7 @@ class AniList:
             time.sleep(0.4)
         return json_obj

-    def _validate(self, anilist_id):
+    def _validate_id(self, anilist_id):
         query = "query ($id: Int) {Media(id: $id) {id title{romaji english}}}"
         media = self._request(query, {"id": anilist_id})["data"]["Media"]
         if media["id"]:
@@ -65,62 +76,31 @@ class AniList:
         return anilist_ids

     def _top_rated(self, limit):
-        query = """
-        query ($page: Int) {
-          Page(page: $page) {
-            pageInfo {hasNextPage}
-            media(averageScore_greater: 3, sort: SCORE_DESC, type: ANIME) {id}
-          }
-        }
-        """
-        return self._pagenation(query, limit=limit)
+        return self._search(limit=limit, averageScore_greater=3)

     def _popular(self, limit):
-        query = """
-        query ($page: Int) {
-          Page(page: $page) {
-            pageInfo {hasNextPage}
-            media(popularity_greater: 1000, sort: POPULARITY_DESC, type: ANIME) {id}
-          }
-        }
-        """
-        return self._pagenation(query, limit=limit)
+        return self._search(sort="popular", limit=limit, popularity_greater=1000)

     def _season(self, season, year, sort, limit):
-        query = """
-        query ($page: Int, $season: MediaSeason, $year: Int, $sort: [MediaSort]) {
-          Page(page: $page){
-            pageInfo {hasNextPage}
-            media(season: $season, seasonYear: $year, type: ANIME, sort: $sort){id}
-          }
-        }
-        """
-        variables = {"season": season.upper(), "year": year, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
+        return self._search(sort=sort, limit=limit, season=season.upper(), year=year)

+    def _search(self, sort="score", limit=0, **kwargs):
+        query_vars = "$page: Int, $sort: [MediaSort]"
+        media_vars = "sort: $sort, type: ANIME"
+        variables = {"sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
+        for key, value in kwargs.items():
+            query_vars += f", ${key}: {search_translation[key]}"
+            media_vars += f", {key}: ${key}"
+            variables[key] = value
+        query = f"query ({query_vars}) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}"
+        logger.info(query)
+        return self._pagenation(query, limit=limit, variables=variables)

     def _genre(self, genre, sort, limit):
-        query = """
-        query ($page: Int, $genre: String, $sort: [MediaSort]) {
-          Page(page: $page){
-            pageInfo {hasNextPage}
-            media(genre: $genre, sort: $sort){id}
-          }
-        }
-        """
-        variables = {"genre": genre, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
+        return self._search(sort=sort, limit=limit, genre=genre)

     def _tag(self, tag, sort, limit):
-        query = """
-        query ($page: Int, $tag: String, $sort: [MediaSort]) {
-          Page(page: $page){
-            pageInfo {hasNextPage}
-            media(tag: $tag, sort: $sort){id}
-          }
-        }
-        """
-        variables = {"tag": tag, "sort": "SCORE_DESC" if sort == "score" else "POPULARITY_DESC"}
-        return self._pagenation(query, limit=limit, variables=variables)
+        return self._search(sort=sort, limit=limit, tag=tag)

     def _studio(self, studio_id):
         query = """
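Illustrative sketch (not from the diff): the new `_search` replaces the per-builder hard-coded GraphQL documents by assembling one from its keyword arguments, using `search_translation` to type each GraphQL variable. A standalone reduction of that string building (the `search_translation` subset and the sample argument are hypothetical):

    search_translation = {"averageScore_greater": "Int"}

    def build_query(**kwargs):
        # Mirror of the string building in _search: every kwarg becomes both a
        # typed GraphQL variable and an argument on the media() field.
        query_vars = "$page: Int, $sort: [MediaSort]"
        media_vars = "sort: $sort, type: ANIME"
        for key in kwargs:
            query_vars += f", ${key}: {search_translation[key]}"
            media_vars += f", {key}: ${key}"
        return f"query ({query_vars}) {{Page(page: $page){{pageInfo {{hasNextPage}}media({media_vars}){{id}}}}}}"

    print(build_query(averageScore_greater=3))
    # query ($page: Int, $sort: [MediaSort], $averageScore_greater: Int) {Page(page: $page){pageInfo {hasNextPage}
    # media(sort: $sort, type: ANIME, averageScore_greater: $averageScore_greater){id}}}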
@@ -166,7 +146,7 @@ class AniList:
         name = ""
         if not ignore_ids:
             ignore_ids = [anilist_id]
-            anilist_id, name = self._validate(anilist_id)
+            anilist_id, name = self._validate_id(anilist_id)
             anilist_ids.append(anilist_id)
         json_obj = self._request(query, {"id": anilist_id})
         edges = [media["node"]["id"] for media in json_obj["data"]["Media"]["relations"]["edges"]
@@ -183,22 +163,26 @@ class AniList:

         return anilist_ids, ignore_ids, name

-    def validate_genre(self, genre):
-        if genre.lower() in self.genres:
-            return self.genres[genre.lower()]
-        raise Failed(f"AniList Error: Genre: {genre} does not exist")

     def validate_tag(self, tag):
-        if tag.lower() in self.tags:
-            return self.tags[tag.lower()]
-        raise Failed(f"AniList Error: Tag: {tag} does not exist")
+        return self._validate(tag, self.tags, "Tag")

+    def validate_category(self, category):
+        return self._validate(category, self.categories, "Category")

+    def validate_genre(self, genre):
+        return self._validate(genre, self.genres, "Genre")

+    def _validate(self, data, options, name):
+        data_check = data.lower().replace(" / ", "-").replace(" ", "-")
+        if data_check in options:
+            return options[data_check]
+        raise Failed(f"AniList Error: {name}: {data} does not exist\nOptions: {', '.join([v for k, v in options.items()])}")

     def validate_anilist_ids(self, anilist_ids, studio=False):
         anilist_id_list = util.get_int_list(anilist_ids, "AniList ID")
         anilist_values = []
+        query = f"query ($id: Int) {{{'Studio(id: $id) {name}' if studio else 'Media(id: $id) {id}'}}}"
         for anilist_id in anilist_id_list:
-            if studio: query = "query ($id: Int) {Studio(id: $id) {name}}"
-            else: query = "query ($id: Int) {Media(id: $id) {id}}"
             try:
                 self._request(query, {"id": anilist_id})
                 anilist_values.append(anilist_id)
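Illustrative sketch (not from the diff; hypothetical data, plain ValueError instead of the project's Failed exception) of what the shared `_validate` helper above does: normalize the user's input the same way the lookup keys were normalized in `__init__`, then return the canonical name or fail with the full list of options.

    def validate(data, options, name):
        # Same normalization as the lookup tables: lowercase, " / " and spaces become dashes.
        data_check = data.lower().replace(" / ", "-").replace(" ", "-")
        if data_check in options:
            return options[data_check]
        raise ValueError(f"AniList Error: {name}: {data} does not exist\nOptions: {', '.join(options.values())}")

    tags = {"slice-of-life": "Slice of Life", "iyashikei": "Iyashikei"}
    print(validate("Slice of Life", tags, "Tag"))   # -> "Slice of Life"
    print(validate("slice-of-life", tags, "Tag"))   # -> "Slice of Life"
    validate("Mecha", tags, "Tag")                  # raises ValueError listing the valid tags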
@@ -210,7 +194,7 @@ class AniList:
     def get_anilist_ids(self, method, data):
         if method == "anilist_id":
             logger.info(f"Processing AniList ID: {data}")
-            anilist_id, name = self._validate(data)
+            anilist_id, name = self._validate_id(data)
             anilist_ids = [anilist_id]
         elif method == "anilist_popular":
             logger.info(f"Processing AniList Popular: {data} Anime")
@@ -384,21 +384,16 @@ class Config:
             paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]]
             for path in paths_to_check:
                 if isinstance(path, dict):
-                    if "url" in path:
-                        if path["url"] is None:
-                            logger.error("Config Error: metadata_path url is blank")
-                        else:
-                            params["metadata_path"].append(("URL", path["url"]))
-                    if "git" in path:
-                        if path["git"] is None:
-                            logger.error("Config Error: metadata_path git is blank")
-                        else:
-                            params["metadata_path"].append(("Git", path['git']))
-                    if "file" in path:
-                        if path["file"] is None:
-                            logger.error("Config Error: metadata_path file is blank")
-                        else:
-                            params["metadata_path"].append(("File", path['file']))
+                    def check_dict(attr, name):
+                        if attr in path:
+                            if path[attr] is None:
+                                logger.error(f"Config Error: metadata_path {attr} is blank")
+                            else:
+                                params["metadata_path"].append((name, path[attr]))
+                    check_dict("url", "URL")
+                    check_dict("git", "Git")
+                    check_dict("file", "File")
+                    check_dict("folder", "Folder")
                 else:
                     params["metadata_path"].append(("File", path))
         else:
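Illustrative sketch (not from the diff; hypothetical `path` dict and a plain list standing in for params["metadata_path"]) of the pattern introduced above: a small inner function closes over the current path entry so the url/git/file/folder keys share one validation branch instead of four near-identical copies, which is also what makes adding the new "folder" key a one-line change.

    import logging

    logger = logging.getLogger(__name__)
    metadata_path = []  # stand-in for params["metadata_path"]
    path = {"folder": "/config/metadata", "url": None}  # hypothetical config entry

    def check_dict(attr, name):
        # One shared branch: warn on a blank value, otherwise record (Type, value).
        if attr in path:
            if path[attr] is None:
                logger.error(f"Config Error: metadata_path {attr} is blank")
            else:
                metadata_path.append((name, path[attr]))

    check_dict("url", "URL")
    check_dict("git", "Git")
    check_dict("file", "File")
    check_dict("folder", "Folder")
    print(metadata_path)  # [('Folder', '/config/metadata')], plus a logged error for the blank url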
@@ -296,7 +296,7 @@ class Convert:
                     if tvdb:
                         tvdb_id.append(tvdb)
             if not tvdb_id:
-                raise Failed(f"Unable to convert TMDb ID: {util.compile_list(tmdb_id)} to TVDb ID")
+                raise Failed(f"Unable to convert TMDb ID: {', '.join(tmdb_id)} to TVDb ID")

         if not imdb_id and tvdb_id:
             for tvdb in tvdb_id:
@@ -306,8 +306,8 @@ class Convert:

         def update_cache(cache_ids, id_type, imdb_in, guid_type):
             if self.config.Cache:
-                cache_ids = util.compile_list(cache_ids)
-                imdb_in = util.compile_list(imdb_in) if imdb_in else None
+                cache_ids = ",".join(cache_ids)
+                imdb_in = ",".join(imdb_in) if imdb_in else None
                 ids = f"{item.guid:<46} | {id_type} ID: {cache_ids:<7} | IMDb ID: {str(imdb_in):<10}"
                 logger.info(util.adjust_space(f" Cache | {'^' if expired else '+'} | {ids} | {item.title}"))
                 self.config.Cache.update_guid_map(item.guid, cache_ids, imdb_in, expired, guid_type)
@@ -240,7 +240,20 @@ class Plex:
         self.metadatas = []

         self.metadata_files = []
+        metadata = []
         for file_type, metadata_file in params["metadata_path"]:
+            if file_type == "folder":
+                if os.path.isdir(metadata_file):
+                    yml_files = util.glob_filter(os.path.join(metadata_file, "*.yml"))
+                    if yml_files:
+                        metadata.extend([("File", yml) for yml in yml_files])
+                    else:
+                        logger.error(f"Config Error: No YAML (.yml) files found in {metadata_file}")
+                else:
+                    logger.error(f"Config Error: Folder not found: {metadata_file}")
+            else:
+                metadata.append((file_type, metadata_file))
+        for file_type, metadata_file in metadata:
             try:
                 meta_obj = Metadata(config, self, file_type, metadata_file)
                 if meta_obj.collections:
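Illustrative sketch (not from the diff; uses the standard library's glob directly, whereas the project routes this through util.glob_filter, and the paths are hypothetical) of the new folder expansion: a folder-type entry is turned into one ("File", path) entry per .yml file found in that directory, while every other entry passes through unchanged.

    import glob
    import os

    def expand_metadata_entry(file_type, metadata_file):
        # Turn a folder entry into individual file entries; pass everything else through.
        if file_type == "folder":
            if os.path.isdir(metadata_file):
                yml_files = glob.glob(os.path.join(metadata_file, "*.yml"))
                return [("File", yml) for yml in yml_files]
            print(f"Config Error: Folder not found: {metadata_file}")
            return []
        return [(file_type, metadata_file)]

    print(expand_metadata_entry("folder", "/config/metadata"))
    print(expand_metadata_entry("File", "/config/Movies.yml"))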
@@ -747,11 +760,11 @@ class Plex:
         if _add:
             updated = True
             self.query_data(getattr(obj, f"add{attr.capitalize()}"), _add)
-            logger.info(f"Detail: {attr.capitalize()} {util.compile_list(_add)} added to {obj.title}")
+            logger.info(f"Detail: {attr.capitalize()} {','.join(_add)} added to {obj.title}")
         if _remove:
             updated = True
             self.query_data(getattr(obj, f"remove{attr.capitalize()}"), _remove)
-            logger.info(f"Detail: {attr.capitalize()} {util.compile_list(_remove)} removed to {obj.title}")
+            logger.info(f"Detail: {attr.capitalize()} {','.join(_remove)} removed to {obj.title}")
         return updated

     def update_item_from_assets(self, item, overlay=None, create=False):
@@ -69,15 +69,6 @@ def add_dict_list(keys, value, dict_map):
         else:
             dict_map[key] = [value]

-def compile_list(data):
-    if isinstance(data, list):
-        text = ""
-        for item in data:
-            text += f"{',' if len(text) > 0 else ''}{item}"
-        return text
-    else:
-        return data
-
 def get_list(data, lower=False, split=True, int_list=False):
     if data is None: return None
     elif isinstance(data, list): return data
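The removed compile_list helper appears to be superseded by plain ",".join at its call sites (see the Convert and Plex hunks above). One difference worth noting as an observation, not something stated in the commit: join requires an iterable of strings and has no pass-through for non-list input, so callers now carry that responsibility. A quick standalone sketch with hypothetical values:

    ids = ["tt0111161", "tt0068646"]
    print(",".join(ids))                          # "tt0111161,tt0068646" (what compile_list produced for a list)
    print(",".join(str(i) for i in [278, 238]))   # numeric IDs must be stringified first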
@@ -368,17 +368,13 @@ def mass_metadata(config, library, items):
                 raise Failed
             item_genres = [genre.tag for genre in item.genres]
             display_str = ""
-            add_genre = []
-            for genre in (g for g in new_genres if g not in item_genres):
-                add_genre.append(genre)
-                display_str += f"{', ' if len(display_str) > 0 else ''}+{genre}"
+            add_genre = [genre for genre in (g for g in new_genres if g not in item_genres)]
             if len(add_genre) > 0:
+                display_str += f"+{', +'.join(add_genre)}"
                 library.query_data(item.addGenre, add_genre)
-            remove_genre = []
-            for genre in (g for g in item_genres if g not in new_genres):
-                remove_genre.append(genre)
-                display_str += f"{', ' if len(display_str) > 0 else ''}-{genre}"
+            remove_genre = [genre for genre in (g for g in item_genres if g not in new_genres)]
            if len(remove_genre) > 0:
+                display_str += f"-{', -'.join(remove_genre)}"
                 library.query_data(item.removeGenre, remove_genre)
             if len(display_str) > 0:
                 logger.info(util.adjust_space(f"{item.title[:25]:<25} | Genres | {display_str}"))
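Illustrative sketch (not from the diff; hypothetical genre lists) of the simplified add/remove computation above: the two list comprehensions give the genres to add and to remove, and the display string is built with join instead of being accumulated inside the loops.

    item_genres = ["Action", "Comedy"]
    new_genres = ["Action", "Drama"]

    add_genre = [g for g in new_genres if g not in item_genres]       # ['Drama']
    remove_genre = [g for g in item_genres if g not in new_genres]    # ['Comedy']

    display_str = ""
    if add_genre:
        display_str += f"+{', +'.join(add_genre)}"
    if remove_genre:
        display_str += f"-{', -'.join(remove_genre)}"
    print(display_str)  # "+Drama-Comedy"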
@@ -568,7 +564,7 @@ try:
                 minutes = int((seconds % 3600) // 60)
                 time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
                 time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
-                util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} {util.compile_list(times_to_run)}")
+                util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}")
                 time.sleep(60)
     except KeyboardInterrupt:
         util.separator("Exiting Plex Meta Manager")
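For reference, a standalone sketch (sample value only; the hours line is implied by the surrounding context rather than shown in the hunk) of the countdown formatting used above:

    seconds = 5400  # hypothetical time until the next scheduled run
    hours = int(seconds // 3600)
    minutes = int((seconds % 3600) // 60)
    time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
    time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
    print(time_str)  # "1 Hour and 30 Minutes"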