Mirror of https://github.com/meisnate12/Plex-Meta-Manager (synced 2024-11-10 15:04:21 +00:00)
Commit 022857435f: 31 changed files with 1692 additions and 1008 deletions
.github/workflows/develop.yml (vendored, new file, +38)
@@ -0,0 +1,38 @@
+name: Docker Develop Release
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    branches: [ develop ]
+
+jobs:
+
+  docker-develop:
+    runs-on: ubuntu-latest
+
+    steps:
+
+      - name: Check Out Repo
+        uses: actions/checkout@v2
+        with:
+          ref: develop
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          context: ./
+          file: ./Dockerfile
+          push: true
+          tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:develop
.github/workflows/latest.yml (vendored, new file, +36)
@@ -0,0 +1,36 @@
+name: Docker Latest Release
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+
+  docker-latest:
+    runs-on: ubuntu-latest
+
+    steps:
+
+      - name: Check Out Repo
+        uses: actions/checkout@v2
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          context: ./
+          file: ./Dockerfile
+          push: true
+          tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:latest
.github/workflows/tag.yml (vendored, new file, +18)
@@ -0,0 +1,18 @@
+name: Tag
+
+on:
+  push:
+    branches: [ master ]
+
+jobs:
+  tag-new-versions:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          token: ${{ secrets.PAT }}
+          fetch-depth: 2
+      - uses: salsify/action-detect-and-tag-new-version@v1.0.3
+        with:
+          version-command: |
+            cat VERSION
.github/workflows/version.yml (vendored, new file, +39)
@@ -0,0 +1,39 @@
+name: Docker Version Release
+
+on:
+  create:
+    tags:
+      - v*
+
+jobs:
+
+  docker-develop:
+    runs-on: ubuntu-latest
+
+    steps:
+
+      - name: Check Out Repo
+        uses: actions/checkout@v2
+
+      - name: Login to Docker Hub
+        uses: docker/login-action@v1
+        with:
+          username: ${{ secrets.DOCKER_HUB_USERNAME }}
+          password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
+
+      - name: Set up Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v1
+
+      - name: Get the version
+        id: get_version
+        run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
+
+      - name: Build and push
+        id: docker_build
+        uses: docker/build-push-action@v2
+        with:
+          context: ./
+          file: ./Dockerfile
+          push: true
+          tags: ${{ secrets.DOCKER_HUB_USERNAME }}/plex-meta-manager:${{ steps.get_version.outputs.VERSION }}
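Note: the "Get the version" step derives the Docker tag from the pushed git tag. The shell parameter expansion ${GITHUB_REF/refs\/tags\//} strips the refs/tags/ prefix before the value is handed to the build step. A minimal Python sketch of the same string operation (the example ref value is hypothetical):

    import os

    github_ref = os.environ.get("GITHUB_REF", "refs/tags/v1.12.2")  # example value only
    version = github_ref.replace("refs/tags/", "", 1)               # -> "v1.12.2"
    print(version)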
README.md
@@ -1,9 +1,10 @@
 # Plex Meta Manager

 [![GitHub release (latest by date)](https://img.shields.io/github/v/release/meisnate12/Plex-Meta-Manager?style=plastic)](https://github.com/meisnate12/Plex-Meta-Manager/releases)
-[![GitHub commits since latest release (by SemVer)](https://img.shields.io/github/commits-since/meisnate12/plex-meta-manager/latest/develop?label=Number%20of%20Commits%20in%20Develop&style=plastic)](https://github.com/meisnate12/Plex-Meta-Manager/tree/develop)
+[![GitHub commits since latest release (by SemVer)](https://img.shields.io/github/commits-since/meisnate12/plex-meta-manager/latest/develop?label=Commits%20in%20Develop&style=plastic)](https://github.com/meisnate12/Plex-Meta-Manager/tree/develop)
 [![Docker Image Version (latest semver)](https://img.shields.io/docker/v/meisnate12/plex-meta-manager?label=docker&sort=semver&style=plastic)](https://hub.docker.com/r/meisnate12/plex-meta-manager)
 [![Docker Cloud Build Status](https://img.shields.io/docker/cloud/build/meisnate12/plex-meta-manager?style=plastic)](https://hub.docker.com/r/meisnate12/plex-meta-manager)
+[![Docker Pulls](https://img.shields.io/docker/pulls/meisnate12/plex-meta-manager?style=plastic)](https://hub.docker.com/r/meisnate12/plex-meta-manager)
 [![Discord](https://img.shields.io/discord/822460010649878528?label=Discord&style=plastic)](https://discord.gg/TsdpsFYqqm)
 [![Sponsor or Donate](https://img.shields.io/badge/-Sponsor_or_Donate-blueviolet?style=plastic)](https://github.com/sponsors/meisnate12)

@@ -11,7 +12,7 @@ The original concept for Plex Meta Manager is [Plex Auto Collections](https://gi

 The script can update many metadata fields for movies, shows, collections, seasons, and episodes and can act as a backup if your Plex DB goes down. It can even update metadata the Plex UI can't, like Season Names. If the time is put into the metadata configuration file, you can recreate your library and all its metadata changes with the click of a button.

-The script is designed to work with most Metadata agents including the new Plex Movie Agent, New Plex TV Agent, [Hama Anime Agent](https://github.com/ZeroQI/Hama.bundle), and [MyAnimeList Anime Agent](https://github.com/Fribb/MyAnimeList.bundle).
+The script works with most Metadata agents including the new Plex Movie Agent, New Plex TV Agent, [Hama Anime Agent](https://github.com/ZeroQI/Hama.bundle), and [MyAnimeList Anime Agent](https://github.com/Fribb/MyAnimeList.bundle).

 ## Getting Started

@@ -23,7 +24,7 @@ The script is designed to work with most Plex

 ## Support

-* Before posting on Github about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/TsdpsFYqqm).
+* Before posting on GitHub about an enhancement, error, or configuration question please visit the [Plex Meta Manager Discord Server](https://discord.gg/TsdpsFYqqm).
 * If you're getting an Error or have an Enhancement post in the [Issues](https://github.com/meisnate12/Plex-Meta-Manager/issues).
 * If you have a configuration question post in the [Discussions](https://github.com/meisnate12/Plex-Meta-Manager/discussions).
 * To see user-submitted Metadata configuration files, and even add your own, go to the [Plex Meta Manager Configs](https://github.com/meisnate12/Plex-Meta-Manager-Configs).
VERSION (new file, +1)
@@ -0,0 +1 @@
+1.12.2-develop1115
config/config.yml.template
@@ -3,32 +3,40 @@
 libraries:                           # Library mappings must have a colon (:) placed after them
   Movies:
     metadata_path:
-      - file: config/Movies.yml      # You have to create this file the other are online
+      - file: config/Movies.yml      # You have to create this file the other is online
       - git: meisnate12/MovieCharts
       - git: meisnate12/Studios
       - git: meisnate12/IMDBGenres
       - git: meisnate12/People
   TV Shows:
     metadata_path:
-      - file: config/TV Shows.yml    # You have to create this file the other are online
+      - file: config/TV Shows.yml    # You have to create this file the other is online
      - git: meisnate12/ShowCharts
      - git: meisnate12/Networks
   Anime:
     metadata_path:
-      - file: config/Anime.yml       # You have to create this file the other are online
+      - file: config/Anime.yml       # You have to create this file the other is online
      - git: meisnate12/AnimeCharts
 settings:                            # Can be individually specified per library as well
   cache: true
   cache_expiration: 60
   asset_directory: config/assets
   asset_folders: true
   assets_for_all: false
   sync_mode: append
   show_unmanaged: true
   show_filtered: false
   show_missing: true
   save_missing: true
   run_again_delay: 2
-  released_missing_only: false
+  create_asset_folders: false
+  missing_only_released: false
+  collection_minimum: 1
+  delete_below_minimum: true
+  tvdb_language: eng
+webhooks:                            # Can be individually specified per library as well
+  error:
+  run_start:
+  run_end:
+  collection_creation:
+  collection_addition:
+  collection_removal:
 plex:                                # Can be individually specified per library as well
   url: http://192.168.1.12:32400
   token: ####################
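Note: several of these blocks carry the comment "Can be individually specified per library as well". A minimal sketch of what that implies (the helper below is hypothetical, not from the codebase): a library-level block overrides the matching global block key by key, and unset keys keep the global defaults.

    # Hypothetical illustration of per-library overrides.
    def effective_settings(global_settings: dict, library_overrides: dict) -> dict:
        merged = dict(global_settings)          # start from the global defaults
        merged.update(library_overrides or {})  # library-level values win
        return merged

    # A library that only overrides sync_mode keeps every other default.
    print(effective_settings({"sync_mode": "append", "cache": True},
                             {"sync_mode": "sync"}))
    # {'sync_mode': 'sync', 'cache': True}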
@@ -42,10 +50,16 @@ tmdb:
 tautulli:                            # Can be individually specified per library as well
   url: http://192.168.1.12:8181
   apikey: ################################
+omdb:
+  apikey: ########
+notifiarr:
+  apikey: ####################################
+anidb:                               # Not required for AniDB builders unless you want mature content
+  username: ######
+  password: ######
 radarr:                              # Can be individually specified per library as well
   url: http://192.168.1.12:7878
   token: ################################
   version: v3
   add: false
   root_folder_path: S:/Movies
   monitor: true
@@ -56,7 +70,6 @@ radarr:                              # Can be individually specified
 sonarr:                              # Can be individually specified per library as well
   url: http://192.168.1.12:8989
   token: ################################
   version: v3
   add: false
   root_folder_path: "S:/TV Shows"
   monitor: all
@@ -67,8 +80,6 @@ sonarr:                              # Can be individually specified
   tag:
   search: false
   cutoff_search: false
-omdb:
-  apikey: ########
 trakt:
   client_id: ################################################################
   client_secret: ################################################################
@@ -88,7 +99,4 @@ mal:
   access_token:
   token_type:
   expires_in:
   refresh_token:
-anidb:                               # Optional
-  username: ######
-  password: ######
modules/anidb.py
@@ -22,15 +22,21 @@ class AniDB:
         if params and not self._login(self.username, self.password).xpath("//li[@class='sub-menu my']/@title"):
             raise Failed("AniDB Error: Login failed")

-    def _request(self, url, language=None, post=None):
-        if post:
-            return self.config.post_html(url, post, headers=util.header(language))
+    def _request(self, url, language=None, data=None):
+        if self.config.trace_mode:
+            logger.debug(f"URL: {url}")
+        if data:
+            return self.config.post_html(url, data=data, headers=util.header(language))
         else:
             return self.config.get_html(url, headers=util.header(language))

     def _login(self, username, password):
-        data = {"show": "main", "xuser": username, "xpass": password, "xdoautologin": "on"}
-        return self._request(urls["login"], post=data)
+        return self._request(urls["login"], data={
+            "show": "main",
+            "xuser": username,
+            "xpass": password,
+            "xdoautologin": "on"
+        })

     def _popular(self, language):
         response = self._request(urls["popular"], language=language)
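Note: this hunk introduces the trace_mode pattern that recurs through the rest of the commit: one config flag gates debug logging of every outbound request. A minimal standalone sketch of the idea (not the project's actual helper):

    import logging, requests

    logger = logging.getLogger("plex_meta_manager")

    def get_html(url, trace_mode=False):
        if trace_mode:
            logger.debug(f"URL: {url}")   # mirrors the trace logging added above
        return requests.get(url).text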
modules/anilist.py
@@ -61,8 +61,13 @@ class AniList:
                     self.options["Tag Category"][media_tag["category"].lower().replace(" ", "-")] = media_tag["category"]

     def _request(self, query, variables, level=1):
+        if self.config.trace_mode:
+            logger.debug(f"Query: {query}")
+            logger.debug(f"Variables: {variables}")
         response = self.config.post(base_url, json={"query": query, "variables": variables})
         json_obj = response.json()
+        if self.config.trace_mode:
+            logger.debug(f"Response: {json_obj}")
         if "errors" in json_obj:
             if json_obj['errors'][0]['message'] == "Too Many Requests.":
                 wait_time = int(response.headers["Retry-After"]) if "Retry-After" in response.headers else 0
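Note: the hunk is truncated right after wait_time is computed. The usual pattern for honoring a Retry-After header (an assumption here; the remainder of the method is not shown in this diff) is to sleep for the advertised interval and retry:

    import time, requests

    def post_with_retry(url, payload, attempts=3):
        for attempt in range(attempts):
            response = requests.post(url, json=payload)
            if response.status_code != 429:                      # not rate limited
                return response
            wait_time = int(response.headers.get("Retry-After", 0))
            time.sleep(wait_time if wait_time > 0 else 2 ** attempt)  # fall back to exponential backoff
        raise RuntimeError("Too Many Requests: retries exhausted")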
modules/builder.py
@@ -1,7 +1,7 @@
 import logging, os, re
 from datetime import datetime, timedelta
 from modules import anidb, anilist, icheckmovies, imdb, letterboxd, mal, plex, radarr, sonarr, stevenlu, tautulli, tmdb, trakt, tvdb, util
-from modules.util import Failed, ImageData
+from modules.util import Failed, ImageData, NotScheduled
 from PIL import Image
 from plexapi.exceptions import BadRequest, NotFound
 from plexapi.video import Movie, Show, Season, Episode
@@ -41,7 +41,8 @@ method_alias = {
     "seasonyear": "year", "isadult": "adult", "startdate": "start", "enddate": "end", "averagescore": "score",
     "minimum_tag_percentage": "min_tag_percent", "minimumtagrank": "min_tag_percent", "minimum_tag_rank": "min_tag_percent",
     "anilist_tag": "anilist_search", "anilist_genre": "anilist_search", "anilist_season": "anilist_search",
-    "mal_producer": "mal_studio", "mal_licensor": "mal_studio"
+    "mal_producer": "mal_studio", "mal_licensor": "mal_studio",
+    "trakt_recommended": "trakt_recommended_weekly", "trakt_watched": "trakt_watched_weekly", "trakt_collected": "trakt_collected_weekly"
 }
 filter_translation = {
     "actor": "actors",
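Note: the new aliases keep legacy builder names working by mapping them onto the new period-specific Trakt builders, with weekly as the default period. A tiny illustration of how such an alias table resolves:

    method_alias = {
        "trakt_recommended": "trakt_recommended_weekly",
        "trakt_watched": "trakt_watched_weekly",
        "trakt_collected": "trakt_collected_weekly",
    }

    def resolve(name: str) -> str:
        return method_alias.get(name, name)   # unknown names pass through unchanged

    assert resolve("trakt_watched") == "trakt_watched_weekly"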
@@ -68,7 +69,7 @@ show_only_builders = ["tmdb_network", "tmdb_show", "tmdb_show_details", "tvdb_sh
 movie_only_builders = [
     "letterboxd_list", "letterboxd_list_details", "icheckmovies_list", "icheckmovies_list_details", "stevenlu_popular",
     "tmdb_collection", "tmdb_collection_details", "tmdb_movie", "tmdb_movie_details", "tmdb_now_playing",
-    "tvdb_movie", "tvdb_movie_details"
+    "tvdb_movie", "tvdb_movie_details", "trakt_boxoffice"
 ]
 summary_details = [
     "summary", "tmdb_summary", "tmdb_description", "tmdb_biography", "tvdb_summary",
@@ -76,16 +77,20 @@ summary_details = [
 ]
 poster_details = ["url_poster", "tmdb_poster", "tmdb_profile", "tvdb_poster", "file_poster"]
 background_details = ["url_background", "tmdb_background", "tvdb_background", "file_background"]
-boolean_details = ["visible_library", "visible_home", "visible_shared", "show_filtered", "show_missing", "save_missing", "item_assets", "missing_only_released", "revert_overlay", "delete_below_minimum"]
+boolean_details = [
+    "visible_library", "visible_home", "visible_shared", "show_filtered", "show_missing", "save_missing",
+    "missing_only_released", "delete_below_minimum"
+]
 string_details = ["sort_title", "content_rating", "name_mapping"]
 ignored_details = [
     "smart_filter", "smart_label", "smart_url", "run_again", "schedule", "sync_mode", "template", "test",
     "tmdb_person", "build_collection", "collection_order", "collection_level", "validate_builders", "collection_name"
 ]
-details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details
+notification_details = ["collection_creation_webhooks", "collection_addition_webhooks", "collection_removal_webhooks"]
+details = ["collection_mode", "collection_order", "collection_level", "collection_minimum", "label"] + boolean_details + string_details + notification_details
 collectionless_details = ["collection_order", "plex_collectionless", "label", "label_sync_mode", "test"] + \
                          poster_details + background_details + summary_details + string_details
-item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay"] + list(plex.item_advance_keys.keys())
+item_details = ["item_label", "item_radarr_tag", "item_sonarr_tag", "item_overlay", "item_assets", "revert_overlay", "item_refresh"] + list(plex.item_advance_keys.keys())
 radarr_details = ["radarr_add", "radarr_add_existing", "radarr_folder", "radarr_monitor", "radarr_search", "radarr_availability", "radarr_quality", "radarr_tag"]
 sonarr_details = [
     "sonarr_add", "sonarr_add_existing", "sonarr_folder", "sonarr_monitor", "sonarr_language", "sonarr_series",
@@ -142,7 +147,10 @@ custom_sort_builders = [
     "tmdb_list", "tmdb_popular", "tmdb_now_playing", "tmdb_top_rated",
     "tmdb_trending_daily", "tmdb_trending_weekly", "tmdb_discover",
     "tvdb_list", "imdb_list", "stevenlu_popular", "anidb_popular",
-    "trakt_list", "trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected",
+    "trakt_list", "trakt_trending", "trakt_popular", "trakt_boxoffice",
+    "trakt_collected_daily", "trakt_collected_weekly", "trakt_collected_monthly", "trakt_collected_yearly", "trakt_collected_all",
+    "trakt_recommended_daily", "trakt_recommended_weekly", "trakt_recommended_monthly", "trakt_recommended_yearly", "trakt_recommended_all",
+    "trakt_watched_daily", "trakt_watched_weekly", "trakt_watched_monthly", "trakt_watched_yearly", "trakt_watched_all",
     "tautulli_popular", "tautulli_watched", "letterboxd_list", "icheckmovies_list",
     "anilist_top_rated", "anilist_popular", "anilist_season", "anilist_studio", "anilist_genre", "anilist_tag", "anilist_search",
     "mal_all", "mal_airing", "mal_upcoming", "mal_tv", "mal_movie", "mal_ova", "mal_special",
@@ -168,7 +176,10 @@ class CollectionBuilder:
             "save_missing": self.library.save_missing,
             "missing_only_released": self.library.missing_only_released,
             "create_asset_folders": self.library.create_asset_folders,
-            "item_assets": False
+            "delete_below_minimum": self.library.delete_below_minimum,
+            "collection_creation_webhooks": self.library.collection_creation_webhooks,
+            "collection_addition_webhooks": self.library.collection_addition_webhooks,
+            "collection_removal_webhooks": self.library.collection_removal_webhooks,
         }
         self.item_details = {}
         self.radarr_details = {}
@@ -183,14 +194,18 @@ class CollectionBuilder:
         self.filtered_keys = {}
         self.run_again_movies = []
         self.run_again_shows = []
+        self.notification_additions = []
+        self.notification_removals = []
+        self.items = []
         self.posters = {}
         self.backgrounds = {}
         self.summaries = {}
         self.schedule = ""
         self.minimum = self.library.collection_minimum
         self.delete_below_minimum = self.library.delete_below_minimum
         self.current_time = datetime.now()
         self.current_year = self.current_time.year
+        self.exists = False
+        self.created = False

         methods = {m.lower(): m for m in self.data}
@@ -381,7 +396,7 @@ class CollectionBuilder:
             if len(self.schedule) == 0:
                 skip_collection = False
             if skip_collection:
-                raise Failed(f"{self.schedule}\n\nCollection {self.name} not scheduled to run")
+                raise NotScheduled(f"{self.schedule}\n\nCollection {self.name} not scheduled to run")

         self.collectionless = "plex_collectionless" in methods
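Note: raising a dedicated NotScheduled instead of the generic Failed lets the caller treat a schedule skip as informational rather than an error. A hedged sketch of that control flow (the class definitions and entry point are assumptions, not the project's exact code):

    class Failed(Exception):
        pass

    class NotScheduled(Exception):
        pass

    def run_collection(builder):
        try:
            builder.build()                # hypothetical entry point
        except NotScheduled as e:
            print(f"Skipped: {e}")         # schedule skip: informational only
        except Failed as e:
            print(f"Error: {e}")           # genuine failure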
@@ -617,6 +632,8 @@ class CollectionBuilder:
         if self.sync and self.obj:
             for item in self.library.get_collection_items(self.obj, self.smart_label_collection):
                 self.plex_map[item.ratingKey] = item
+        if self.obj:
+            self.exists = True
         else:
             self.obj = None
             self.sync = False
@@ -634,9 +651,9 @@ class CollectionBuilder:
         elif method_name == "tmdb_biography":
             self.summaries[method_name] = self.config.TMDb.get_person(util.regex_first_int(method_data, "TMDb Person ID")).biography
         elif method_name == "tvdb_summary":
-            self.summaries[method_name] = self.config.TVDb.get_movie_or_show(method_data, self.language, self.library.is_movie).summary
+            self.summaries[method_name] = self.config.TVDb.get_item(method_data, self.library.is_movie).summary
         elif method_name == "tvdb_description":
-            self.summaries[method_name] = self.config.TVDb.get_list_description(method_data, self.language)
+            self.summaries[method_name] = self.config.TVDb.get_list_description(method_data)
         elif method_name == "trakt_description":
             self.summaries[method_name] = self.config.Trakt.list_description(self.config.Trakt.validate_trakt(method_data, self.library.is_movie)[0])
         elif method_name == "letterboxd_description":
@@ -654,7 +671,7 @@ class CollectionBuilder:
             url_slug = self.config.TMDb.get_person(util.regex_first_int(method_data, 'TMDb Person ID')).profile_path
             self.posters[method_name] = f"{self.config.TMDb.image_url}{url_slug}"
         elif method_name == "tvdb_poster":
-            self.posters[method_name] = f"{self.config.TVDb.get_item(method_data, self.language, self.library.is_movie).poster_path}"
+            self.posters[method_name] = f"{self.config.TVDb.get_item(method_data, self.library.is_movie).poster_path}"
         elif method_name == "file_poster":
             if os.path.exists(method_data):
                 self.posters[method_name] = os.path.abspath(method_data)
@@ -668,7 +685,7 @@ class CollectionBuilder:
             url_slug = self.config.TMDb.get_movie_show_or_collection(util.regex_first_int(method_data, 'TMDb ID'), self.library.is_movie).poster_path
             self.backgrounds[method_name] = f"{self.config.TMDb.image_url}{url_slug}"
         elif method_name == "tvdb_background":
-            self.posters[method_name] = f"{self.config.TVDb.get_item(method_data, self.language, self.library.is_movie).background_path}"
+            self.posters[method_name] = f"{self.config.TVDb.get_item(method_data, self.library.is_movie).background_path}"
         elif method_name == "file_background":
             if os.path.exists(method_data):
                 self.backgrounds[method_name] = os.path.abspath(method_data)
@@ -692,6 +709,8 @@ class CollectionBuilder:
                 self.details["label.sync"] = util.get_list(method_data)
             else:
                 self.details[method_final] = util.get_list(method_data)
+        elif method_name in notification_details:
+            self.details[method_name] = util.parse(method_name, method_data, datatype="list")
         elif method_name in boolean_details:
             default = self.details[method_name] if method_name in self.details else None
             self.details[method_name] = util.parse(method_name, method_data, datatype="bool", default=default)
@@ -722,6 +741,9 @@ class CollectionBuilder:
                 raise Failed("Each Overlay can only be used once per Library")
             self.library.overlays.append(method_data)
             self.item_details[method_name] = method_data
+        elif method_name in ["item_assets", "revert_overlay", "item_refresh"]:
+            if util.parse(method_name, method_data, datatype="bool", default=False):
+                self.item_details[method_name] = True
         elif method_name in plex.item_advance_keys:
             key, options = plex.item_advance_keys[method_name]
             if method_name in advance_new_agent and self.library.agent not in plex.new_plex_agents:
@@ -745,7 +767,6 @@ class CollectionBuilder:
             else:
                 raise Failed(f"Collection Error: {method_name} attribute must be either announced, cinemas, released or db")
         elif method_name == "radarr_quality":
-            self.library.Radarr.get_profile_id(method_data)
             self.radarr_details["quality"] = method_data
         elif method_name == "radarr_tag":
             self.radarr_details["tag"] = util.get_list(method_data)
@@ -753,19 +774,13 @@ class CollectionBuilder:
     def _sonarr(self, method_name, method_data):
         if method_name in ["sonarr_add", "sonarr_add_existing", "sonarr_season", "sonarr_search", "sonarr_cutoff_search"]:
             self.sonarr_details[method_name[7:]] = util.parse(method_name, method_data, datatype="bool")
-        elif method_name == "sonarr_folder":
-            self.sonarr_details["folder"] = method_data
+        elif method_name in ["sonarr_folder", "sonarr_quality", "sonarr_language"]:
+            self.sonarr_details[method_name[7:]] = method_data
         elif method_name == "sonarr_monitor":
             if str(method_data).lower() in sonarr.monitor_translation:
                 self.sonarr_details["monitor"] = str(method_data).lower()
             else:
                 raise Failed(f"Collection Error: {method_name} attribute must be either all, future, missing, existing, pilot, first, latest or none")
-        elif method_name == "sonarr_quality":
-            self.library.Sonarr.get_profile_id(method_data, "quality_profile")
-            self.sonarr_details["quality"] = method_data
-        elif method_name == "sonarr_language":
-            self.library.Sonarr.get_profile_id(method_data, "language_profile")
-            self.sonarr_details["language"] = method_data
         elif method_name == "sonarr_series":
             if str(method_data).lower() in sonarr.series_type:
                 self.sonarr_details["series"] = str(method_data).lower()
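Note: the consolidated branch relies on method_name[7:] stripping the 7-character "sonarr_" prefix, so sonarr_folder, sonarr_quality, and sonarr_language all store under their short keys through one code path. A tiny illustration (the values are hypothetical):

    sonarr_details = {}
    for name, value in [("sonarr_folder", "/tv"), ("sonarr_quality", "HD-1080p"), ("sonarr_language", "English")]:
        sonarr_details[name[7:]] = value   # "sonarr_folder"[7:] == "folder"
    print(sonarr_details)                  # {'folder': '/tv', 'quality': 'HD-1080p', 'language': 'English'}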
@@ -1018,17 +1033,22 @@ class CollectionBuilder:
                 self.builders.append(("trakt_list", trakt_list))
             if method_name.endswith("_details"):
                 self.summaries[method_name] = self.config.Trakt.list_description(trakt_lists[0])
-        elif method_name in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:
-            self.builders.append((method_name, util.parse(method_name, method_data, datatype="int", default=10)))
         elif method_name in ["trakt_watchlist", "trakt_collection"]:
             for trakt_list in self.config.Trakt.validate_trakt(method_data, self.library.is_movie, trakt_type=method_name[6:]):
                 self.builders.append((method_name, trakt_list))
+        elif method_name == "trakt_boxoffice":
+            if util.parse(method_name, method_data, datatype="bool", default=False):
+                self.builders.append((method_name, 10))
+            else:
+                raise Failed(f"Collection Error: {method_name} must be set to true")
+        elif method_name in trakt.builders:
+            self.builders.append((method_name, util.parse(method_name, method_data, datatype="int", default=10)))

     def _tvdb(self, method_name, method_data):
         values = util.get_list(method_data)
         if method_name.endswith("_details"):
             if method_name.startswith(("tvdb_movie", "tvdb_show")):
-                item = self.config.TVDb.get_item(self.language, values[0], method_name.startswith("tvdb_movie"))
+                item = self.config.TVDb.get_item(values[0], method_name.startswith("tvdb_movie"))
                 if hasattr(item, "description") and item.description:
                     self.summaries[method_name] = item.description
                 if hasattr(item, "background_path") and item.background_path:
@@ -1036,7 +1056,7 @@ class CollectionBuilder:
                 if hasattr(item, "poster_path") and item.poster_path:
                     self.posters[method_name] = f"{self.config.TMDb.image_url}{item.poster_path}"
             elif method_name.startswith("tvdb_list"):
-                self.summaries[method_name] = self.config.TVDb.get_list_description(values[0], self.language)
+                self.summaries[method_name] = self.config.TVDb.get_list_description(values[0])
         for value in values:
             self.builders.append((method_name[:-8] if method_name.endswith("_details") else method_name, value))
@@ -1091,7 +1111,7 @@ class CollectionBuilder:
             mal_ids = self.config.MyAnimeList.get_mal_ids(method, value)
             ids = self.config.Convert.myanimelist_to_ids(mal_ids, self.library)
         elif "tvdb" in method:
-            ids = self.config.TVDb.get_tvdb_ids(method, value, self.language)
+            ids = self.config.TVDb.get_tvdb_ids(method, value)
         elif "imdb" in method:
             ids = self.config.IMDb.get_imdb_ids(method, value, self.language)
         elif "icheckmovies" in method:
@@ -1119,7 +1139,7 @@ class CollectionBuilder:
                     rating_keys.append(input_id)
                 elif id_type == "tmdb" and not self.parts_collection:
                     if input_id in self.library.movie_map:
-                        rating_keys.append(self.library.movie_map[input_id][0])
+                        rating_keys.extend(self.library.movie_map[input_id])
                     elif input_id not in self.missing_movies:
                         self.missing_movies.append(input_id)
                 elif id_type in ["tvdb", "tmdb_show"] and not self.parts_collection:
@@ -1130,12 +1150,12 @@ class CollectionBuilder:
                         logger.error(e)
                         continue
                     if input_id in self.library.show_map:
-                        rating_keys.append(self.library.show_map[input_id][0])
+                        rating_keys.extend(self.library.show_map[input_id])
                     elif input_id not in self.missing_shows:
                         self.missing_shows.append(input_id)
                 elif id_type == "imdb" and not self.parts_collection:
                     if input_id in self.library.imdb_map:
-                        rating_keys.append(self.library.imdb_map[input_id][0])
+                        rating_keys.extend(self.library.imdb_map[input_id])
                     else:
                         if self.do_missing:
                             try:
@@ -1318,11 +1338,12 @@ class CollectionBuilder:
             bool_mod = "" if validation else "!"
             bool_arg = "true" if validation else "false"
             results, display_add = build_url_arg(1, mod=bool_mod, arg_s=bool_arg, mod_s="is")
-        elif (attr in ["title", "episode_title", "studio", "decade", "year", "episode_year"] or attr in plex.tags) and modifier in ["", ".not", ".begins", ".ends"]:
+        elif (attr in ["title", "episode_title", "studio", "decade", "year", "episode_year"] or attr in plex.tags) and modifier in ["", ".is", ".isnot", ".not", ".begins", ".ends"]:
             results = ""
             display_add = ""
             for og_value, result in validation:
-                built_arg = build_url_arg(quote(result) if attr in string_filters else result, arg_s=og_value)
+                print(og_value, result)
+                built_arg = build_url_arg(quote(str(result)) if attr in string_filters else result, arg_s=og_value)
                 display_add += built_arg[1]
                 results += f"{conjunction if len(results) > 0 else ''}{built_arg[0]}"
         else:
@@ -1471,6 +1492,7 @@ class CollectionBuilder:
     def add_to_collection(self):
         name, collection_items = self.library.get_collection_name_and_items(self.obj if self.obj else self.name, self.smart_label_collection)
+        total = len(self.rating_keys)
         amount_added = 0
         for i, item in enumerate(self.rating_keys, 1):
             try:
                 current = self.fetch_item(item)
@@ -1481,13 +1503,46 @@ class CollectionBuilder:
                 logger.info(util.adjust_space(f"{name} Collection | {current_operation} | {self.item_title(current)}"))
                 if current in collection_items:
                     self.plex_map[current.ratingKey] = None
-                elif self.smart_label_collection:
-                    self.library.query_data(current.addLabel, name)
                 else:
-                    self.library.query_data(current.addCollection, name)
+                    self.library.alter_collection(current, name, smart_label_collection=self.smart_label_collection)
                     amount_added += 1
+                    if self.details["collection_addition_webhooks"]:
+                        if self.library.is_movie and current.ratingKey in self.library.movie_rating_key_map:
+                            add_id = self.library.movie_rating_key_map[current.ratingKey]
+                        elif self.library.is_show and current.ratingKey in self.library.show_rating_key_map:
+                            add_id = self.library.show_rating_key_map[current.ratingKey]
+                        else:
+                            add_id = None
+                        self.notification_additions.append({"title": current.title, "id": add_id})
         util.print_end()
         logger.info("")
         logger.info(f"{total} {self.collection_level.capitalize()}{'s' if total > 1 else ''} Processed")
+        return amount_added
+
+    def sync_collection(self):
+        amount_removed = 0
+        for ratingKey, item in self.plex_map.items():
+            if item is not None:
+                if amount_removed == 0:
+                    logger.info("")
+                    util.separator(f"Removed from {self.name} Collection", space=False, border=False)
+                    logger.info("")
+                self.library.reload(item)
+                logger.info(f"{self.name} Collection | - | {self.item_title(item)}")
+                self.library.alter_collection(item, self.name, smart_label_collection=self.smart_label_collection, add=False)
+                if self.details["collection_removal_webhooks"]:
+                    if self.library.is_movie and item.ratingKey in self.library.movie_rating_key_map:
+                        remove_id = self.library.movie_rating_key_map[item.ratingKey]
+                    elif self.library.is_show and item.ratingKey in self.library.show_rating_key_map:
+                        remove_id = self.library.show_rating_key_map[item.ratingKey]
+                    else:
+                        remove_id = None
+                    self.notification_removals.append({"title": item.title, "id": remove_id})
+                amount_removed += 1
+        if amount_removed > 0:
+            logger.info("")
+            logger.info(f"{amount_removed} {self.collection_level.capitalize()}{'s' if amount_removed == 1 else ''} Removed")
+        return amount_removed

     def check_tmdb_filter(self, item_id, is_movie, item=None, check_released=False):
         if self.tmdb_filters or check_released:
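Note: both branches resolve a Plex ratingKey back to the external id (TMDb or TVDb) that the webhook payload reports. A tiny illustration of that reverse lookup (all values hypothetical):

    movie_rating_key_map = {12345: 550}          # Plex ratingKey -> TMDb id
    add_id = movie_rating_key_map.get(12345)     # 550; None when the key is unknown
    notification = {"title": "Example Movie", "id": add_id}
    print(notification)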
@@ -1609,6 +1664,8 @@ class CollectionBuilder:
         return True

     def run_missing(self):
+        added_to_radarr = 0
+        added_to_sonarr = 0
         if len(self.missing_movies) > 0:
             missing_movies_with_names = []
             for missing_id in self.missing_movies:
@@ -1635,7 +1692,7 @@ class CollectionBuilder:
             if self.library.Radarr:
                 if self.radarr_details["add"]:
                     try:
-                        self.library.Radarr.add_tmdb(missing_tmdb_ids, **self.radarr_details)
+                        added_to_radarr += self.library.Radarr.add_tmdb(missing_tmdb_ids, **self.radarr_details)
                     except Failed as e:
                         logger.error(e)
             if "item_radarr_tag" in self.item_details:
@@ -1649,18 +1706,17 @@ class CollectionBuilder:
             missing_shows_with_names = []
             for missing_id in self.missing_shows:
                 try:
-                    show = self.config.TVDb.get_series(self.language, missing_id)
+                    show = self.config.TVDb.get_series(missing_id)
                 except Failed as e:
                     logger.error(e)
                     continue
-                current_title = str(show.title.encode("ascii", "replace").decode())
                 if self.check_tmdb_filter(missing_id, False, check_released=self.details["missing_only_released"]):
-                    missing_shows_with_names.append((current_title, missing_id))
+                    missing_shows_with_names.append((show.title, missing_id))
                     if self.details["show_missing"] is True:
-                        logger.info(f"{self.name} Collection | ? | {current_title} (TVDB: {missing_id})")
+                        logger.info(f"{self.name} Collection | ? | {show.title} (TVDB: {missing_id})")
                 else:
                     if self.details["show_filtered"] is True and self.details["show_missing"] is True:
-                        logger.info(f"{self.name} Collection | X | {current_title} (TVDb: {missing_id})")
+                        logger.info(f"{self.name} Collection | X | {show.title} (TVDb: {missing_id})")
             logger.info("")
             logger.info(f"{len(missing_shows_with_names)} Show{'s' if len(missing_shows_with_names) > 1 else ''} Missing")
             if len(missing_shows_with_names) > 0:
@@ -1671,7 +1727,7 @@ class CollectionBuilder:
             if self.library.Sonarr:
                 if self.sonarr_details["add"]:
                     try:
-                        self.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.sonarr_details)
+                        added_to_sonarr += self.library.Sonarr.add_tvdb(missing_tvdb_ids, **self.sonarr_details)
                     except Failed as e:
                         logger.error(e)
             if "item_sonarr_tag" in self.item_details:
@@ -1684,6 +1740,7 @@ class CollectionBuilder:
         if len(self.missing_parts) > 0 and self.library.is_show and self.details["save_missing"] is True:
             for missing in self.missing_parts:
                 logger.info(f"{self.name} Collection | X | {missing}")
+        return added_to_radarr, added_to_sonarr

     def item_title(self, item):
         if self.collection_level == "season":
@@ -1702,34 +1759,10 @@ class CollectionBuilder:
         else:
             return item.title

-    def sync_collection(self):
-        count_removed = 0
-        for ratingKey, item in self.plex_map.items():
-            if item is not None:
-                if count_removed == 0:
-                    logger.info("")
-                    util.separator(f"Removed from {self.name} Collection", space=False, border=False)
-                    logger.info("")
-                self.library.reload(item)
-                logger.info(f"{self.name} Collection | - | {self.item_title(item)}")
-                if self.smart_label_collection:
-                    self.library.query_data(item.removeLabel, self.name)
-                else:
-                    self.library.query_data(item.removeCollection, self.name)
-                count_removed += 1
-        if count_removed > 0:
-            logger.info("")
-            logger.info(f"{count_removed} {self.collection_level.capitalize()}{'s' if count_removed == 1 else ''} Removed")
-
-    def update_item_details(self):
-        add_tags = self.item_details["item_label"] if "item_label" in self.item_details else None
-        remove_tags = self.item_details["item_label.remove"] if "item_label.remove" in self.item_details else None
-        sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None
-
-        if self.build_collection:
-            items = self.library.get_collection_items(self.obj, self.smart_label_collection)
-        else:
-            items = []
+    def load_collection_items(self):
+        if self.build_collection and self.obj:
+            self.items = self.library.get_collection_items(self.obj, self.smart_label_collection)
+        elif not self.build_collection:
             logger.info("")
             util.separator(f"Items Found for {self.name} Collection", space=False, border=False)
             logger.info("")
@@ -1737,10 +1770,13 @@ class CollectionBuilder:
             try:
                 item = self.fetch_item(rk)
                 logger.info(f"{item.title} (Rating Key: {rk})")
-                items.append(item)
+                self.items.append(item)
             except Failed as e:
                 logger.error(e)
+        if not self.items:
+            raise Failed(f"Plex Error: No Collection items found")
+
+    def update_item_details(self):
         logger.info("")
         util.separator(f"Updating Details of the Items in {self.name} Collection", space=False, border=False)
         logger.info("")
@@ -1759,7 +1795,8 @@ class CollectionBuilder:
                 except Failed as e:
                     logger.error(e)
                     continue
-                self.library.edit_tags("label", item, add_tags=[f"{overlay_name} Overlay"])
+                if isinstance(item, (Movie, Show)):
+                    self.library.edit_tags("label", item, add_tags=[f"{overlay_name} Overlay"])
             self.config.Cache.update_remove_overlay(self.library.image_table_name, overlay_name)
             rating_keys = [int(item.ratingKey) for item in self.library.get_labeled_items(f"{overlay_name} Overlay")]
             overlay_folder = os.path.join(self.config.default_dir, "overlays", overlay_name)
@@ -1767,16 +1804,20 @@ class CollectionBuilder:
             temp_image = os.path.join(overlay_folder, f"temp.png")
             overlay = (overlay_name, overlay_folder, overlay_image, temp_image)

-        revert = "revert_overlay" in self.details and self.details["revert_overlay"]
+        revert = "revert_overlay" in self.item_details
         if revert:
             overlay = None

+        add_tags = self.item_details["item_label"] if "item_label" in self.item_details else None
+        remove_tags = self.item_details["item_label.remove"] if "item_label.remove" in self.item_details else None
+        sync_tags = self.item_details["item_label.sync"] if "item_label.sync" in self.item_details else None
+
         tmdb_ids = []
         tvdb_ids = []
-        for item in items:
+        for item in self.items:
             if int(item.ratingKey) in rating_keys and not revert:
                 rating_keys.remove(int(item.ratingKey))
-            if self.details["item_assets"] or overlay is not None:
+            if "item_assets" in self.item_details or overlay is not None:
                 try:
                     self.library.update_item_from_assets(item, overlay=overlay)
                 except Failed as e:
@@ -1793,6 +1834,8 @@ class CollectionBuilder:
                 if getattr(item, key) != options[method_data]:
                     advance_edits[key] = options[method_data]
             self.library.edit_item(item, item.title, self.collection_level.capitalize(), advance_edits, advanced=True)
+            if "item_refresh" in self.item_details:
+                item.refresh()

         if len(tmdb_ids) > 0:
             if "item_radarr_tag" in self.item_details:
@@ -1823,7 +1866,7 @@ class CollectionBuilder:
         if self.obj:
             self.library.query(self.obj.delete)

-    def update_details(self):
+    def load_collection(self):
         if not self.obj and self.smart_url:
             self.library.create_smart_collection(self.name, self.smart_type_key, self.smart_url)
         elif self.smart_label_collection:
@@ -1834,7 +1877,13 @@ class CollectionBuilder:
         except Failed:
             raise Failed(f"Collection Error: Label: {self.name} was not added to any items in the Library")
         self.obj = self.library.get_collection(self.name)
+        if not self.exists:
+            self.created = True

+    def update_details(self):
+        logger.info("")
+        util.separator(f"Updating Details of {self.name} Collection", space=False, border=False)
+        logger.info("")
         if self.smart_url and self.smart_url != self.library.smart_filter(self.obj):
             self.library.update_smart_collection(self.obj, self.smart_url)
             logger.info(f"Detail: Smart Filter updated to {self.smart_url}")
@@ -1983,6 +2032,9 @@ class CollectionBuilder:
             self.library.upload_images(self.obj, poster=poster, background=background)

     def sort_collection(self):
+        logger.info("")
+        util.separator(f"Sorting {self.name} Collection", space=False, border=False)
+        logger.info("")
         items = self.library.get_collection_items(self.obj, self.smart_label_collection)
         keys = {item.ratingKey: item for item in items}
         previous = None
@@ -1994,10 +2046,27 @@ class CollectionBuilder:
                 self.library.move_item(self.obj, key, after=previous)
             previous = key

+    def send_notifications(self):
+        if self.obj and (
+                (self.details["collection_creation_webhooks"] and self.created) or
+                (self.details["collection_addition_webhooks"] and len(self.notification_additions) > 0) or
+                (self.details["collection_removal_webhooks"] and len(self.notification_removals) > 0)
+        ):
+            self.obj.reload()
+            self.library.Webhooks.collection_hooks(
+                self.details["collection_creation_webhooks"] + self.details["collection_addition_webhooks"] + self.details["collection_removal_webhooks"],
+                self.obj,
+                created=self.created,
+                additions=self.notification_additions,
+                removals=self.notification_removals
+            )
+
     def run_collections_again(self):
         self.obj = self.library.get_collection(self.name)
         name, collection_items = self.library.get_collection_name_and_items(self.obj, self.smart_label_collection)
+        self.created = False
         rating_keys = []
+        self.notification_additions = []
         for mm in self.run_again_movies:
             if mm in self.library.movie_map:
                 rating_keys.extend(self.library.movie_map[mm])
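Note: at the transport level a collection webhook amounts to one HTTP POST per configured endpoint. A hedged sketch of that idea (the endpoint URL and payload shape are assumptions for illustration, not the project's documented webhook contract):

    import requests

    payload = {
        "collection": "Top 10 Movies",                          # hypothetical
        "created": True,
        "additions": [{"title": "Example Movie", "id": 550}],   # hypothetical TMDb id
        "removals": [],
    }
    requests.post("https://example.com/hooks/pmm", json=payload, timeout=10)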
@@ -2015,8 +2084,16 @@ class CollectionBuilder:
             if current in collection_items:
                 logger.info(f"{name} Collection | = | {self.item_title(current)}")
             else:
-                self.library.query_data(current.addLabel if self.smart_label_collection else current.addCollection, name)
+                self.library.alter_collection(current, name, smart_label_collection=self.smart_label_collection)
                 logger.info(f"{name} Collection | + | {self.item_title(current)}")
+                if self.library.is_movie and current.ratingKey in self.library.movie_rating_key_map:
+                    add_id = self.library.movie_rating_key_map[current.ratingKey]
+                elif self.library.is_show and current.ratingKey in self.library.show_rating_key_map:
+                    add_id = self.library.show_rating_key_map[current.ratingKey]
+                else:
+                    add_id = None
+                self.notification_additions.append({"title": current.title, "id": add_id})
+        self.send_notifications()
         logger.info(f"{len(rating_keys)} {self.collection_level.capitalize()}{'s' if len(rating_keys) > 1 else ''} Processed")

         if len(self.run_again_movies) > 0:
@@ -2039,7 +2116,7 @@ class CollectionBuilder:
             for missing_id in self.run_again_shows:
                 if missing_id not in self.library.show_map:
                     try:
-                        title = str(self.config.TVDb.get_series(self.language, missing_id).title.encode("ascii", "replace").decode())
+                        title = self.config.TVDb.get_series(missing_id).title
                     except Failed as e:
                         logger.error(e)
                         continue
modules/config.py
@@ -1,4 +1,4 @@
-import logging, os, requests
+import base64, logging, os, requests
 from datetime import datetime
 from lxml import html
 from modules import util, radarr, sonarr
@@ -10,6 +10,7 @@ from modules.icheckmovies import ICheckMovies
 from modules.imdb import IMDb
 from modules.letterboxd import Letterboxd
 from modules.mal import MyAnimeList
+from modules.notifiarr import Notifiarr
 from modules.omdb import OMDb
 from modules.plex import Plex
 from modules.radarr import Radarr
@@ -20,6 +21,7 @@ from modules.tmdb import TMDb
 from modules.trakt import Trakt
 from modules.tvdb import TVDb
 from modules.util import Failed
+from modules.webhooks import Webhooks
 from retrying import retry
 from ruamel import yaml
@@ -29,21 +31,23 @@ sync_modes = {"append": "Only Add Items to the Collection", "sync": "Add & Remov
 mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata through OMDb"}

 class Config:
-    def __init__(self, default_dir, config_path=None, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
+    def __init__(self, default_dir, attrs):
         logger.info("Locating config...")
-        if config_path and os.path.exists(config_path): self.config_path = os.path.abspath(config_path)
-        elif config_path and not os.path.exists(config_path): raise Failed(f"Config Error: config not found at {os.path.abspath(config_path)}")
+        config_file = attrs["config_file"]
+        if config_file and os.path.exists(config_file): self.config_path = os.path.abspath(config_file)
+        elif config_file and not os.path.exists(config_file): raise Failed(f"Config Error: config not found at {os.path.abspath(config_file)}")
         elif os.path.exists(os.path.join(default_dir, "config.yml")): self.config_path = os.path.abspath(os.path.join(default_dir, "config.yml"))
         else: raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
         logger.info(f"Using {self.config_path} as config")

         self.default_dir = default_dir
-        self.test_mode = is_test
-        self.run_start_time = time_scheduled
-        self.run_hour = datetime.strptime(time_scheduled, "%H:%M").hour
-        self.requested_collections = util.get_list(requested_collections)
-        self.requested_libraries = util.get_list(requested_libraries)
-        self.resume_from = resume_from
+        self.test_mode = attrs["test"] if "test" in attrs else False
+        self.trace_mode = attrs["trace"] if "trace" in attrs else False
+        self.run_start_time = attrs["time"]
+        self.run_hour = datetime.strptime(attrs["time"], "%H:%M").hour
+        self.requested_collections = util.get_list(attrs["collections"]) if "collections" in attrs else None
+        self.requested_libraries = util.get_list(attrs["libraries"]) if "libraries" in attrs else None
+        self.resume_from = attrs["resume"] if "resume" in attrs else None

         yaml.YAML().allow_duplicate_keys = True
         try:
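Note: the constructor now takes a single attrs dict instead of a long list of keyword parameters, so new run options (like trace) can be added without changing the signature. A sketch of the calling convention implied by the diff (the values shown are examples):

    attrs = {
        "config_file": "config/config.yml",
        "test": False,
        "trace": False,
        "time": "05:00",
    }
    # config = Config(default_dir, attrs)   # as constructed in this commit
    print(attrs)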
@@ -81,15 +85,17 @@ class Config:
                         replace_attr(new_config["libraries"][library], "save_missing", "plex")
                 if "libraries" in new_config: new_config["libraries"] = new_config.pop("libraries")
                 if "settings" in new_config: new_config["settings"] = new_config.pop("settings")
+                if "webhooks" in new_config: new_config["webhooks"] = new_config.pop("webhooks")
                 if "plex" in new_config: new_config["plex"] = new_config.pop("plex")
                 if "tmdb" in new_config: new_config["tmdb"] = new_config.pop("tmdb")
                 if "tautulli" in new_config: new_config["tautulli"] = new_config.pop("tautulli")
+                if "omdb" in new_config: new_config["omdb"] = new_config.pop("omdb")
+                if "notifiarr" in new_config: new_config["notifiarr"] = new_config.pop("notifiarr")
+                if "anidb" in new_config: new_config["anidb"] = new_config.pop("anidb")
                 if "radarr" in new_config: new_config["radarr"] = new_config.pop("radarr")
                 if "sonarr" in new_config: new_config["sonarr"] = new_config.pop("sonarr")
-                if "omdb" in new_config: new_config["omdb"] = new_config.pop("omdb")
                 if "trakt" in new_config: new_config["trakt"] = new_config.pop("trakt")
                 if "mal" in new_config: new_config["mal"] = new_config.pop("mal")
-                if "anidb" in new_config: new_config["anidb"] = new_config.pop("anidb")
                 yaml.round_trip_dump(new_config, open(self.config_path, "w", encoding="utf-8"), indent=None, block_seq_indent=2)
                 self.data = new_config
             except yaml.scanner.ScannerError as e:
@@ -132,9 +138,9 @@ class Config:
             elif var_type == "path":
                 if os.path.exists(os.path.abspath(data[attribute])): return data[attribute]
                 else: message = f"Path {os.path.abspath(data[attribute])} does not exist"
-            elif var_type == "list": return util.get_list(data[attribute])
+            elif var_type == "list": return util.get_list(data[attribute], split=False)
             elif var_type == "list_path":
-                temp_list = [p for p in util.get_list(data[attribute], split=True) if os.path.exists(os.path.abspath(p))]
+                temp_list = [p for p in util.get_list(data[attribute], split=False) if os.path.exists(os.path.abspath(p))]
                 if len(temp_list) > 0: return temp_list
                 else: message = "No Paths exist"
             elif var_type == "lower_list": return util.get_list(data[attribute], lower=True)
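Note: flipping split to False means plain "list" attributes are no longer comma-split into multiple entries. A simplified re-implementation of the behavior the two calls above rely on (an assumption for illustration; the real helper lives in modules/util.py):

    def get_list(data, split=True, lower=False):
        if data is None:
            return []
        if isinstance(data, list):
            values = [str(d) for d in data]       # already a YAML list
        elif split:
            values = [v.strip() for v in str(data).split(",")]
        else:
            values = [str(data)]                  # keep the value whole
        return [v.lower() for v in values] if lower else values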
@@ -176,17 +182,27 @@ class Config:
             "cache_expiration": check_for_attribute(self.data, "cache_expiration", parent="settings", var_type="int", default=60),
             "asset_directory": check_for_attribute(self.data, "asset_directory", parent="settings", var_type="list_path", default=[os.path.join(default_dir, "assets")]),
             "asset_folders": check_for_attribute(self.data, "asset_folders", parent="settings", var_type="bool", default=True),
-            "assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False),
+            "assets_for_all": check_for_attribute(self.data, "assets_for_all", parent="settings", var_type="bool", default=False, save=False, do_print=False),
             "sync_mode": check_for_attribute(self.data, "sync_mode", parent="settings", default="append", test_list=sync_modes),
             "run_again_delay": check_for_attribute(self.data, "run_again_delay", parent="settings", var_type="int", default=0),
             "show_unmanaged": check_for_attribute(self.data, "show_unmanaged", parent="settings", var_type="bool", default=True),
             "show_filtered": check_for_attribute(self.data, "show_filtered", parent="settings", var_type="bool", default=False),
             "show_missing": check_for_attribute(self.data, "show_missing", parent="settings", var_type="bool", default=True),
+            "show_missing_assets": check_for_attribute(self.data, "show_missing_assets", parent="settings", var_type="bool", default=True),
             "save_missing": check_for_attribute(self.data, "save_missing", parent="settings", var_type="bool", default=True),
             "missing_only_released": check_for_attribute(self.data, "missing_only_released", parent="settings", var_type="bool", default=False),
             "create_asset_folders": check_for_attribute(self.data, "create_asset_folders", parent="settings", var_type="bool", default=False),
             "collection_minimum": check_for_attribute(self.data, "collection_minimum", parent="settings", var_type="int", default=1),
-            "delete_below_minimum": check_for_attribute(self.data, "delete_below_minimum", parent="settings", var_type="bool", default=False)
+            "delete_below_minimum": check_for_attribute(self.data, "delete_below_minimum", parent="settings", var_type="bool", default=False),
+            "tvdb_language": check_for_attribute(self.data, "tvdb_language", parent="settings", default="default")
         }
+        self.webhooks = {
+            "error": check_for_attribute(self.data, "error", parent="webhooks", var_type="list", default_is_none=True),
+            "run_start": check_for_attribute(self.data, "run_start", parent="webhooks", var_type="list", default_is_none=True),
+            "run_end": check_for_attribute(self.data, "run_end", parent="webhooks", var_type="list", default_is_none=True),
+            "collection_creation": check_for_attribute(self.data, "collection_creation", parent="webhooks", var_type="list", default_is_none=True),
+            "collection_addition": check_for_attribute(self.data, "collection_addition", parent="webhooks", var_type="list", default_is_none=True),
+            "collection_removal": check_for_attribute(self.data, "collection_removal", parent="webhooks", var_type="list", default_is_none=True),
+        }
         if self.general["cache"]:
             util.separator()
@ -196,323 +212,401 @@ class Config:
|
|||
|
||||
util.separator()
|
||||
|
||||
self.TMDb = None
|
||||
if "tmdb" in self.data:
|
||||
logger.info("Connecting to TMDb...")
|
||||
self.TMDb = TMDb(self, {
|
||||
"apikey": check_for_attribute(self.data, "apikey", parent="tmdb", throw=True),
|
||||
"language": check_for_attribute(self.data, "language", parent="tmdb", default="en")
|
||||
})
|
||||
logger.info(f"TMDb Connection {'Failed' if self.TMDb is None else 'Successful'}")
|
||||
else:
|
||||
raise Failed("Config Error: tmdb attribute not found")
|
||||
|
||||
util.separator()
|
||||
|
||||
self.OMDb = None
|
||||
if "omdb" in self.data:
|
||||
logger.info("Connecting to OMDb...")
|
||||
self.NotifiarrFactory = None
|
||||
if "notifiarr" in self.data:
|
||||
logger.info("Connecting to Notifiarr...")
|
||||
try:
|
||||
self.OMDb = OMDb(self, {"apikey": check_for_attribute(self.data, "apikey", parent="omdb", throw=True)})
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
logger.info(f"OMDb Connection {'Failed' if self.OMDb is None else 'Successful'}")
|
||||
else:
|
||||
logger.warning("omdb attribute not found")
|
||||
|
||||
util.separator()
|
||||
|
||||
self.Trakt = None
|
||||
if "trakt" in self.data:
|
||||
logger.info("Connecting to Trakt...")
|
||||
try:
|
||||
self.Trakt = Trakt(self, {
|
||||
"client_id": check_for_attribute(self.data, "client_id", parent="trakt", throw=True),
|
||||
"client_secret": check_for_attribute(self.data, "client_secret", parent="trakt", throw=True),
|
||||
"config_path": self.config_path,
|
||||
"authorization": self.data["trakt"]["authorization"] if "authorization" in self.data["trakt"] else None
|
||||
self.NotifiarrFactory = Notifiarr(self, {
|
||||
"apikey": check_for_attribute(self.data, "apikey", parent="notifiarr", throw=True),
|
||||
"develop": check_for_attribute(self.data, "develop", parent="notifiarr", var_type="bool", default=False, do_print=False, save=False),
|
||||
"test": check_for_attribute(self.data, "test", parent="notifiarr", var_type="bool", default=False, do_print=False, save=False)
|
||||
})
|
||||
except Failed as e:
|
||||
logger.error(e)
|
||||
logger.info(f"Trakt Connection {'Failed' if self.Trakt is None else 'Successful'}")
|
||||
logger.info(f"Notifiarr Connection {'Failed' if self.NotifiarrFactory is None else 'Successful'}")
|
||||
else:
|
||||
logger.warning("trakt attribute not found")
|
||||
logger.warning("notifiarr attribute not found")
|
||||
|
||||
self.Webhooks = Webhooks(self, self.webhooks, notifiarr=self.NotifiarrFactory)
|
||||
self.Webhooks.start_time_hooks(self.run_start_time)
|
||||
|
||||
self.errors = []

util.separator()

self.MyAnimeList = None
if "mal" in self.data:
logger.info("Connecting to My Anime List...")
try:
self.MyAnimeList = MyAnimeList(self, {
"client_id": check_for_attribute(self.data, "client_id", parent="mal", throw=True),
"client_secret": check_for_attribute(self.data, "client_secret", parent="mal", throw=True),
"config_path": self.config_path,
"authorization": self.data["mal"]["authorization"] if "authorization" in self.data["mal"] else None
try:
self.TMDb = None
if "tmdb" in self.data:
logger.info("Connecting to TMDb...")
self.TMDb = TMDb(self, {
"apikey": check_for_attribute(self.data, "apikey", parent="tmdb", throw=True),
"language": check_for_attribute(self.data, "language", parent="tmdb", default="en")
})
except Failed as e:
logger.error(e)
logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}")
else:
logger.warning("mal attribute not found")
logger.info(f"TMDb Connection {'Failed' if self.TMDb is None else 'Successful'}")
else:
raise Failed("Config Error: tmdb attribute not found")

util.separator()

self.AniDB = None
if "anidb" in self.data:
util.separator()
logger.info("Connecting to AniDB...")
try:
self.AniDB = AniDB(self, {
"username": check_for_attribute(self.data, "username", parent="anidb", throw=True),
"password": check_for_attribute(self.data, "password", parent="anidb", throw=True)

self.OMDb = None
if "omdb" in self.data:
logger.info("Connecting to OMDb...")
try:
self.OMDb = OMDb(self, {"apikey": check_for_attribute(self.data, "apikey", parent="omdb", throw=True)})
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"OMDb Connection {'Failed' if self.OMDb is None else 'Successful'}")
else:
logger.warning("omdb attribute not found")

util.separator()

self.Trakt = None
if "trakt" in self.data:
logger.info("Connecting to Trakt...")
try:
self.Trakt = Trakt(self, {
"client_id": check_for_attribute(self.data, "client_id", parent="trakt", throw=True),
"client_secret": check_for_attribute(self.data, "client_secret", parent="trakt", throw=True),
"config_path": self.config_path,
"authorization": self.data["trakt"]["authorization"] if "authorization" in self.data["trakt"] else None
})
except Failed as e:
logger.error(e)
logger.info(f"My Anime List Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}")
if self.AniDB is None:
self.AniDB = AniDB(self, None)
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"Trakt Connection {'Failed' if self.Trakt is None else 'Successful'}")
else:
logger.warning("trakt attribute not found")

self.TVDb = TVDb(self)
self.IMDb = IMDb(self)
self.Convert = Convert(self)
self.AniList = AniList(self)
self.Letterboxd = Letterboxd(self)
self.ICheckMovies = ICheckMovies(self)
self.StevenLu = StevenLu(self)

util.separator()

logger.info("Connecting to Plex Libraries...")
self.general["plex"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="plex", var_type="url", default_is_none=True),
|
||||
"token": check_for_attribute(self.data, "token", parent="plex", default_is_none=True),
|
||||
"timeout": check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60),
|
||||
"clean_bundles": check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False),
|
||||
"empty_trash": check_for_attribute(self.data, "empty_trash", parent="plex", var_type="bool", default=False),
|
||||
"optimize": check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False)
|
||||
}
|
||||
self.general["radarr"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="radarr", var_type="url", default_is_none=True),
|
||||
"token": check_for_attribute(self.data, "token", parent="radarr", default_is_none=True),
|
||||
"add": check_for_attribute(self.data, "add", parent="radarr", var_type="bool", default=False),
|
||||
"add_existing": check_for_attribute(self.data, "add_existing", parent="radarr", var_type="bool", default=False),
|
||||
"root_folder_path": check_for_attribute(self.data, "root_folder_path", parent="radarr", default_is_none=True),
|
||||
"monitor": check_for_attribute(self.data, "monitor", parent="radarr", var_type="bool", default=True),
|
||||
"availability": check_for_attribute(self.data, "availability", parent="radarr", test_list=radarr.availability_descriptions, default="announced"),
|
||||
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="radarr", default_is_none=True),
|
||||
"tag": check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True),
|
||||
"search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
|
||||
}
|
||||
self.general["sonarr"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True),
|
||||
"token": check_for_attribute(self.data, "token", parent="sonarr", default_is_none=True),
|
||||
"add": check_for_attribute(self.data, "add", parent="sonarr", var_type="bool", default=False),
|
||||
"add_existing": check_for_attribute(self.data, "add_existing", parent="sonarr", var_type="bool", default=False),
|
||||
"root_folder_path": check_for_attribute(self.data, "root_folder_path", parent="sonarr", default_is_none=True),
|
||||
"monitor": check_for_attribute(self.data, "monitor", parent="sonarr", test_list=sonarr.monitor_descriptions, default="all"),
|
||||
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="sonarr", default_is_none=True),
|
||||
"language_profile": check_for_attribute(self.data, "language_profile", parent="sonarr", default_is_none=True),
|
||||
"series_type": check_for_attribute(self.data, "series_type", parent="sonarr", test_list=sonarr.series_type_descriptions, default="standard"),
|
||||
"season_folder": check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True),
|
||||
"tag": check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True),
|
||||
"search": check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False),
|
||||
"cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False)
|
||||
}
|
||||
self.general["tautulli"] = {
|
||||
"url": check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True),
|
||||
"apikey": check_for_attribute(self.data, "apikey", parent="tautulli", default_is_none=True)
|
||||
}
|
||||
|
||||
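These general blocks only establish global defaults; each library later re-reads the same attributes with the global value passed in as the default, so a per-library setting always wins. The fallback chain, reduced to plain dicts:

    def resolve(lib_block, attribute, global_defaults):
        # A value set on the library block wins; otherwise fall back to the global default.
        if isinstance(lib_block, dict) and lib_block.get(attribute) is not None:
            return lib_block[attribute]
        return global_defaults.get(attribute)

    general_radarr = {"url": "http://radarr:7878", "search": False}
    library_radarr = {"search": True}
    print(resolve(library_radarr, "search", general_radarr))  # True
    print(resolve(library_radarr, "url", general_radarr))     # http://radarr:7878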
self.libraries = []
libs = check_for_attribute(self.data, "libraries", throw=True)

for library_name, lib in libs.items():
if self.requested_libraries and library_name not in self.requested_libraries:
continue
util.separator()
params = {
"mapping_name": str(library_name),
"name": str(lib["library_name"]) if lib and "library_name" in lib and lib["library_name"] else str(library_name)

self.MyAnimeList = None
if "mal" in self.data:
logger.info("Connecting to My Anime List...")
try:
self.MyAnimeList = MyAnimeList(self, {
"client_id": check_for_attribute(self.data, "client_id", parent="mal", throw=True),
"client_secret": check_for_attribute(self.data, "client_secret", parent="mal", throw=True),
"config_path": self.config_path,
"authorization": self.data["mal"]["authorization"] if "authorization" in self.data["mal"] else None
})
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"My Anime List Connection {'Failed' if self.MyAnimeList is None else 'Successful'}")
else:
logger.warning("mal attribute not found")

util.separator()

self.AniDB = None
if "anidb" in self.data:
util.separator()
logger.info("Connecting to AniDB...")
try:
self.AniDB = AniDB(self, {
"username": check_for_attribute(self.data, "username", parent="anidb", throw=True),
"password": check_for_attribute(self.data, "password", parent="anidb", throw=True)
})
except Failed as e:
self.errors.append(e)
logger.error(e)
logger.info(f"My Anime List Connection {'Failed Continuing as Guest ' if self.MyAnimeList is None else 'Successful'}")
if self.AniDB is None:
self.AniDB = AniDB(self, None)
self.TVDb = TVDb(self, self.general["tvdb_language"])
self.IMDb = IMDb(self)
self.Convert = Convert(self)
self.AniList = AniList(self)
self.Letterboxd = Letterboxd(self)
self.ICheckMovies = ICheckMovies(self)
self.StevenLu = StevenLu(self)

util.separator()

logger.info("Connecting to Plex Libraries...")

self.general["plex"] = {
"url": check_for_attribute(self.data, "url", parent="plex", var_type="url", default_is_none=True),
"token": check_for_attribute(self.data, "token", parent="plex", default_is_none=True),
"timeout": check_for_attribute(self.data, "timeout", parent="plex", var_type="int", default=60),
"clean_bundles": check_for_attribute(self.data, "clean_bundles", parent="plex", var_type="bool", default=False),
"empty_trash": check_for_attribute(self.data, "empty_trash", parent="plex", var_type="bool", default=False),
"optimize": check_for_attribute(self.data, "optimize", parent="plex", var_type="bool", default=False)
}
self.general["radarr"] = {
"url": check_for_attribute(self.data, "url", parent="radarr", var_type="url", default_is_none=True),
"token": check_for_attribute(self.data, "token", parent="radarr", default_is_none=True),
"add": check_for_attribute(self.data, "add", parent="radarr", var_type="bool", default=False),
"add_existing": check_for_attribute(self.data, "add_existing", parent="radarr", var_type="bool", default=False),
"root_folder_path": check_for_attribute(self.data, "root_folder_path", parent="radarr", default_is_none=True),
"monitor": check_for_attribute(self.data, "monitor", parent="radarr", var_type="bool", default=True),
"availability": check_for_attribute(self.data, "availability", parent="radarr", test_list=radarr.availability_descriptions, default="announced"),
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="radarr", default_is_none=True),
"tag": check_for_attribute(self.data, "tag", parent="radarr", var_type="lower_list", default_is_none=True),
"search": check_for_attribute(self.data, "search", parent="radarr", var_type="bool", default=False)
}
self.general["sonarr"] = {
"url": check_for_attribute(self.data, "url", parent="sonarr", var_type="url", default_is_none=True),
"token": check_for_attribute(self.data, "token", parent="sonarr", default_is_none=True),
"add": check_for_attribute(self.data, "add", parent="sonarr", var_type="bool", default=False),
"add_existing": check_for_attribute(self.data, "add_existing", parent="sonarr", var_type="bool", default=False),
"root_folder_path": check_for_attribute(self.data, "root_folder_path", parent="sonarr", default_is_none=True),
"monitor": check_for_attribute(self.data, "monitor", parent="sonarr", test_list=sonarr.monitor_descriptions, default="all"),
"quality_profile": check_for_attribute(self.data, "quality_profile", parent="sonarr", default_is_none=True),
"language_profile": check_for_attribute(self.data, "language_profile", parent="sonarr", default_is_none=True),
"series_type": check_for_attribute(self.data, "series_type", parent="sonarr", test_list=sonarr.series_type_descriptions, default="standard"),
"season_folder": check_for_attribute(self.data, "season_folder", parent="sonarr", var_type="bool", default=True),
"tag": check_for_attribute(self.data, "tag", parent="sonarr", var_type="lower_list", default_is_none=True),
"search": check_for_attribute(self.data, "search", parent="sonarr", var_type="bool", default=False),
"cutoff_search": check_for_attribute(self.data, "cutoff_search", parent="sonarr", var_type="bool", default=False)
}
self.general["tautulli"] = {
"url": check_for_attribute(self.data, "url", parent="tautulli", var_type="url", default_is_none=True),
"apikey": check_for_attribute(self.data, "apikey", parent="tautulli", default_is_none=True)
}
display_name = f"{params['name']} ({params['mapping_name']})" if lib and "library_name" in lib and lib["library_name"] else params["mapping_name"]

util.separator(f"{display_name} Configuration")
logger.info("")
logger.info(f"Connecting to {display_name} Library...")
self.libraries = []
libs = check_for_attribute(self.data, "libraries", throw=True)
params["asset_directory"] = check_for_attribute(lib, "asset_directory", parent="settings", var_type="list_path", default=self.general["asset_directory"], default_is_none=True, save=False)
|
||||
if params["asset_directory"] is None:
|
||||
logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library")
|
||||
|
||||
params["asset_folders"] = check_for_attribute(lib, "asset_folders", parent="settings", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
|
||||
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", parent="settings", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
|
||||
params["sync_mode"] = check_for_attribute(lib, "sync_mode", parent="settings", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)
|
||||
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
|
||||
params["show_filtered"] = check_for_attribute(lib, "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
|
||||
params["show_missing"] = check_for_attribute(lib, "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
|
||||
params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
|
||||
params["missing_only_released"] = check_for_attribute(lib, "missing_only_released", parent="settings", var_type="bool", default=self.general["missing_only_released"], do_print=False, save=False)
|
||||
params["create_asset_folders"] = check_for_attribute(lib, "create_asset_folders", parent="settings", var_type="bool", default=self.general["create_asset_folders"], do_print=False, save=False)
|
||||
params["collection_minimum"] = check_for_attribute(lib, "collection_minimum", parent="settings", var_type="int", default=self.general["collection_minimum"], do_print=False, save=False)
|
||||
params["delete_below_minimum"] = check_for_attribute(lib, "delete_below_minimum", parent="settings", var_type="bool", default=self.general["delete_below_minimum"], do_print=False, save=False)
|
||||
|
||||
params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_genre_update" in lib)
|
||||
if self.OMDb is None and params["mass_genre_update"] == "omdb":
|
||||
params["mass_genre_update"] = None
|
||||
logger.error("Config Error: mass_genre_update cannot be omdb without a successful OMDb Connection")
|
||||
|
||||
params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_audience_rating_update" in lib)
|
||||
if self.OMDb is None and params["mass_audience_rating_update"] == "omdb":
|
||||
params["mass_audience_rating_update"] = None
|
||||
logger.error("Config Error: mass_audience_rating_update cannot be omdb without a successful OMDb Connection")
|
||||
|
||||
params["mass_critic_rating_update"] = check_for_attribute(lib, "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=lib and "mass_audience_rating_update" in lib)
|
||||
if self.OMDb is None and params["mass_critic_rating_update"] == "omdb":
|
||||
params["mass_critic_rating_update"] = None
|
||||
logger.error("Config Error: mass_critic_rating_update cannot be omdb without a successful OMDb Connection")
|
||||
|
||||
params["mass_trakt_rating_update"] = check_for_attribute(lib, "mass_trakt_rating_update", var_type="bool", default=False, save=False, do_print=lib and "mass_trakt_rating_update" in lib)
|
||||
if self.Trakt is None and params["mass_trakt_rating_update"]:
|
||||
params["mass_trakt_rating_update"] = None
|
||||
logger.error("Config Error: mass_trakt_rating_update cannot run without a successful Trakt Connection")
|
||||
|
||||
params["split_duplicates"] = check_for_attribute(lib, "split_duplicates", var_type="bool", default=False, save=False, do_print=lib and "split_duplicates" in lib)
|
||||
params["radarr_add_all"] = check_for_attribute(lib, "radarr_add_all", var_type="bool", default=False, save=False, do_print=lib and "radarr_add_all" in lib)
|
||||
params["sonarr_add_all"] = check_for_attribute(lib, "sonarr_add_all", var_type="bool", default=False, save=False, do_print=lib and "sonarr_add_all" in lib)
|
||||
|
||||
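The mass_*_update attributes are validated against a test_list of allowed option names before they are accepted. A sketch of that validation style (the option keys below are placeholders, not the real mass_update_options contents):

    mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Ratings through OMDb"}  # placeholder descriptions

    def parse_option(value, test_list):
        # Accept None (attribute unset) but reject unknown option keys.
        if value is None:
            return None
        if value not in test_list:
            raise ValueError(f"Config Error: {value} is invalid; options are {list(test_list)}")
        return value

    print(parse_option("omdb", mass_update_options))  # omdb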
try:
if lib and "metadata_path" in lib:
params["metadata_path"] = []
if lib["metadata_path"] is None:
raise Failed("Config Error: metadata_path attribute is blank")
paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]]
for path in paths_to_check:
if isinstance(path, dict):
def check_dict(attr, name):
if attr in path:
if path[attr] is None:
logger.error(f"Config Error: metadata_path {attr} is blank")
else:
params["metadata_path"].append((name, path[attr]))
check_dict("url", "URL")
check_dict("git", "Git")
check_dict("file", "File")
check_dict("folder", "Folder")
else:
params["metadata_path"].append(("File", path))
else:
params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"))]
params["default_dir"] = default_dir
params["plex"] = {
"url": check_for_attribute(lib, "url", parent="plex", var_type="url", default=self.general["plex"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False),
"timeout": check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False),
"clean_bundles": check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False),
"empty_trash": check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False),
"optimize": check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
for library_name, lib in libs.items():
if self.requested_libraries and library_name not in self.requested_libraries:
continue
util.separator()
params = {
"mapping_name": str(library_name),
"name": str(lib["library_name"]) if lib and "library_name" in lib and lib["library_name"] else str(library_name)
}
library = Plex(self, params)
logger.info("")
logger.info(f"{display_name} Library Connection Successful")
except Failed as e:
util.print_stacktrace()
util.print_multiline(e, error=True)
logger.info(f"{display_name} Library Connection Failed")
continue
display_name = f"{params['name']} ({params['mapping_name']})" if lib and "library_name" in lib and lib["library_name"] else params["mapping_name"]
if self.general["radarr"]["url"] or (lib and "radarr" in lib):
|
||||
logger.info("")
|
||||
util.separator("Radarr Configuration", space=False, border=False)
|
||||
logger.info("")
|
||||
logger.info(f"Connecting to {display_name} library's Radarr...")
|
||||
util.separator(f"{display_name} Configuration")
|
||||
logger.info("")
|
||||
logger.info(f"Connecting to {display_name} Library...")
|
||||
|
||||
params["asset_directory"] = check_for_attribute(lib, "asset_directory", parent="settings", var_type="list_path", default=self.general["asset_directory"], default_is_none=True, save=False)
|
||||
if params["asset_directory"] is None:
|
||||
logger.warning("Config Warning: Assets will not be used asset_directory attribute must be set under config or under this specific Library")
|
||||
|
||||
params["asset_folders"] = check_for_attribute(lib, "asset_folders", parent="settings", var_type="bool", default=self.general["asset_folders"], do_print=False, save=False)
|
||||
params["sync_mode"] = check_for_attribute(lib, "sync_mode", parent="settings", test_list=sync_modes, default=self.general["sync_mode"], do_print=False, save=False)
|
||||
params["show_unmanaged"] = check_for_attribute(lib, "show_unmanaged", parent="settings", var_type="bool", default=self.general["show_unmanaged"], do_print=False, save=False)
|
||||
params["show_filtered"] = check_for_attribute(lib, "show_filtered", parent="settings", var_type="bool", default=self.general["show_filtered"], do_print=False, save=False)
|
||||
params["show_missing"] = check_for_attribute(lib, "show_missing", parent="settings", var_type="bool", default=self.general["show_missing"], do_print=False, save=False)
|
||||
params["show_missing_assets"] = check_for_attribute(lib, "show_missing_assets", parent="settings", var_type="bool", default=self.general["show_missing_assets"], do_print=False, save=False)
|
||||
params["save_missing"] = check_for_attribute(lib, "save_missing", parent="settings", var_type="bool", default=self.general["save_missing"], do_print=False, save=False)
|
||||
params["missing_only_released"] = check_for_attribute(lib, "missing_only_released", parent="settings", var_type="bool", default=self.general["missing_only_released"], do_print=False, save=False)
|
||||
params["create_asset_folders"] = check_for_attribute(lib, "create_asset_folders", parent="settings", var_type="bool", default=self.general["create_asset_folders"], do_print=False, save=False)
|
||||
params["collection_minimum"] = check_for_attribute(lib, "collection_minimum", parent="settings", var_type="int", default=self.general["collection_minimum"], do_print=False, save=False)
|
||||
params["delete_below_minimum"] = check_for_attribute(lib, "delete_below_minimum", parent="settings", var_type="bool", default=self.general["delete_below_minimum"], do_print=False, save=False)
|
||||
params["delete_unmanaged_collections"] = check_for_attribute(lib, "delete_unmanaged_collections", parent="settings", var_type="bool", default=False, do_print=False, save=False)
|
||||
params["delete_collections_with_less"] = check_for_attribute(lib, "delete_collections_with_less", parent="settings", var_type="int", default_is_none=True, do_print=False, save=False)
|
||||
params["error_webhooks"] = check_for_attribute(lib, "error", parent="webhooks", var_type="list", default=self.webhooks["error"], do_print=False, save=False, default_is_none=True)
|
||||
params["collection_creation_webhooks"] = check_for_attribute(lib, "collection_creation", parent="webhooks", var_type="list", default=self.webhooks["collection_creation"], do_print=False, save=False, default_is_none=True)
|
||||
params["collection_addition_webhooks"] = check_for_attribute(lib, "collection_addition", parent="webhooks", var_type="list", default=self.webhooks["collection_addition"], do_print=False, save=False, default_is_none=True)
|
||||
params["collection_removal_webhooks"] = check_for_attribute(lib, "collection_removal", parent="webhooks", var_type="list", default=self.webhooks["collection_removal"], do_print=False, save=False, default_is_none=True)
|
||||
params["assets_for_all"] = check_for_attribute(lib, "assets_for_all", parent="settings", var_type="bool", default=self.general["assets_for_all"], do_print=False, save=False)
|
||||
params["mass_genre_update"] = check_for_attribute(lib, "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=False)
|
||||
params["mass_audience_rating_update"] = check_for_attribute(lib, "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=False)
|
||||
params["mass_critic_rating_update"] = check_for_attribute(lib, "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False, do_print=False)
|
||||
params["mass_trakt_rating_update"] = check_for_attribute(lib, "mass_trakt_rating_update", var_type="bool", default=False, save=False, do_print=False)
|
||||
params["split_duplicates"] = check_for_attribute(lib, "split_duplicates", var_type="bool", default=False, save=False, do_print=False)
|
||||
params["radarr_add_all"] = check_for_attribute(lib, "radarr_add_all", var_type="bool", default=False, save=False, do_print=False)
|
||||
params["sonarr_add_all"] = check_for_attribute(lib, "sonarr_add_all", var_type="bool", default=False, save=False, do_print=False)
|
||||
|
||||
if lib and "operations" in lib and lib["operations"]:
|
||||
if isinstance(lib["operations"], dict):
|
||||
if "assets_for_all" in lib["operations"]:
|
||||
params["assets_for_all"] = check_for_attribute(lib["operations"], "assets_for_all", var_type="bool", default=False, save=False)
|
||||
if "delete_unmanaged_collections" in lib["operations"]:
|
||||
params["delete_unmanaged_collections"] = check_for_attribute(lib["operations"], "delete_unmanaged_collections", var_type="bool", default=False, save=False)
|
||||
if "delete_collections_with_less" in lib["operations"]:
|
||||
params["delete_collections_with_less"] = check_for_attribute(lib["operations"], "delete_collections_with_less", var_type="int", default_is_none=True, save=False)
|
||||
if "mass_genre_update" in lib["operations"]:
|
||||
params["mass_genre_update"] = check_for_attribute(lib["operations"], "mass_genre_update", test_list=mass_update_options, default_is_none=True, save=False)
|
||||
if "mass_audience_rating_update" in lib["operations"]:
|
||||
params["mass_audience_rating_update"] = check_for_attribute(lib["operations"], "mass_audience_rating_update", test_list=mass_update_options, default_is_none=True, save=False)
|
||||
if "mass_critic_rating_update" in lib["operations"]:
|
||||
params["mass_critic_rating_update"] = check_for_attribute(lib["operations"], "mass_critic_rating_update", test_list=mass_update_options, default_is_none=True, save=False)
|
||||
if "mass_trakt_rating_update" in lib["operations"]:
|
||||
params["mass_trakt_rating_update"] = check_for_attribute(lib["operations"], "mass_trakt_rating_update", var_type="bool", default=False, save=False)
|
||||
if "split_duplicates" in lib["operations"]:
|
||||
params["split_duplicates"] = check_for_attribute(lib["operations"], "split_duplicates", var_type="bool", default=False, save=False)
|
||||
if "radarr_add_all" in lib["operations"]:
|
||||
params["radarr_add_all"] = check_for_attribute(lib["operations"], "radarr_add_all", var_type="bool", default=False, save=False)
|
||||
if "sonarr_add_all" in lib["operations"]:
|
||||
params["sonarr_add_all"] = check_for_attribute(lib["operations"], "sonarr_add_all", var_type="bool", default=False, save=False)
|
||||
else:
|
||||
logger.error("Config Error: operations must be a dictionary")
|
||||
|
||||
def error_check(attr, service):
|
||||
params[attr] = None
|
||||
err = f"Config Error: {attr} cannot be omdb without a successful {service} Connection"
|
||||
self.errors.append(err)
|
||||
logger.error(err)
|
||||
|
||||
if self.OMDb is None and params["mass_genre_update"] == "omdb":
|
||||
error_check("mass_genre_update", "OMDb")
|
||||
if self.OMDb is None and params["mass_audience_rating_update"] == "omdb":
|
||||
error_check("mass_audience_rating_update", "OMDb")
|
||||
if self.OMDb is None and params["mass_critic_rating_update"] == "omdb":
|
||||
error_check("mass_critic_rating_update", "OMDb")
|
||||
if self.Trakt is None and params["mass_trakt_rating_update"]:
|
||||
error_check("mass_trakt_rating_update", "Trakt")
|
||||
|
||||
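error_check replaces the copy-pasted gating from the old code: when a dependent service connection failed, the option is forced off and the message is both logged and queued for the end-of-run error webhook. Reduced to its essentials:

    errors = []
    params = {"mass_genre_update": "omdb"}
    omdb = None  # stand-in for self.OMDb after a failed connection

    def error_check(attr, service):
        params[attr] = None
        err = f"Config Error: {attr} cannot be omdb without a successful {service} Connection"
        errors.append(err)
        print(err)

    if omdb is None and params["mass_genre_update"] == "omdb":
        error_check("mass_genre_update", "OMDb")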
try:
library.Radarr = Radarr(self, {
"url": check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False),
"add": check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False),
"add_existing": check_for_attribute(lib, "add_existing", parent="radarr", var_type="bool", default=self.general["radarr"]["add_existing"], save=False),
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False),
"monitor": check_for_attribute(lib, "monitor", parent="radarr", var_type="bool", default=self.general["radarr"]["monitor"], save=False),
"availability": check_for_attribute(lib, "availability", parent="radarr", test_list=radarr.availability_descriptions, default=self.general["radarr"]["availability"], save=False),
"quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr", default=self.general["radarr"]["quality_profile"], req_default=True, save=False),
"tag": check_for_attribute(lib, "tag", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False),
"search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
})
if lib and "metadata_path" in lib:
params["metadata_path"] = []
if lib["metadata_path"] is None:
raise Failed("Config Error: metadata_path attribute is blank")
paths_to_check = lib["metadata_path"] if isinstance(lib["metadata_path"], list) else [lib["metadata_path"]]
for path in paths_to_check:
if isinstance(path, dict):
def check_dict(attr, name):
if attr in path:
if path[attr] is None:
err = f"Config Error: metadata_path {attr} is blank"
self.errors.append(err)
logger.error(err)
else:
params["metadata_path"].append((name, path[attr]))
check_dict("url", "URL")
check_dict("git", "Git")
check_dict("file", "File")
check_dict("folder", "Folder")
else:
params["metadata_path"].append(("File", path))
else:
params["metadata_path"] = [("File", os.path.join(default_dir, f"{library_name}.yml"))]
params["default_dir"] = default_dir
params["plex"] = {
"url": check_for_attribute(lib, "url", parent="plex", var_type="url", default=self.general["plex"]["url"], req_default=True, save=False),
"token": check_for_attribute(lib, "token", parent="plex", default=self.general["plex"]["token"], req_default=True, save=False),
"timeout": check_for_attribute(lib, "timeout", parent="plex", var_type="int", default=self.general["plex"]["timeout"], save=False),
"clean_bundles": check_for_attribute(lib, "clean_bundles", parent="plex", var_type="bool", default=self.general["plex"]["clean_bundles"], save=False),
"empty_trash": check_for_attribute(lib, "empty_trash", parent="plex", var_type="bool", default=self.general["plex"]["empty_trash"], save=False),
"optimize": check_for_attribute(lib, "optimize", parent="plex", var_type="bool", default=self.general["plex"]["optimize"], save=False)
}
library = Plex(self, params)
logger.info("")
logger.info(f"{display_name} Library Connection Successful")
except Failed as e:
self.errors.append(e)
util.print_stacktrace()
util.print_multiline(e, error=True)
logger.info(f"{display_name} Library Connection Failed")
continue

if self.general["radarr"]["url"] or (lib and "radarr" in lib):
logger.info("")
logger.info(f"{display_name} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
if self.general["sonarr"]["url"] or (lib and "sonarr" in lib):
|
||||
logger.info("")
|
||||
util.separator("Sonarr Configuration", space=False, border=False)
|
||||
logger.info("")
|
||||
logger.info(f"Connecting to {display_name} library's Sonarr...")
|
||||
logger.info("")
|
||||
try:
|
||||
library.Sonarr = Sonarr(self, {
|
||||
"url": check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False),
|
||||
"token": check_for_attribute(lib, "token", parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False),
|
||||
"add": check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False),
|
||||
"add_existing": check_for_attribute(lib, "add_existing", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add_existing"], save=False),
|
||||
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="sonarr", default=self.general["sonarr"]["root_folder_path"], req_default=True, save=False),
|
||||
"monitor": check_for_attribute(lib, "monitor", parent="sonarr", test_list=sonarr.monitor_descriptions, default=self.general["sonarr"]["monitor"], save=False),
|
||||
"quality_profile": check_for_attribute(lib, "quality_profile", parent="sonarr", default=self.general["sonarr"]["quality_profile"], req_default=True, save=False),
|
||||
"language_profile": check_for_attribute(lib, "language_profile", parent="sonarr", default=self.general["sonarr"]["language_profile"], save=False) if self.general["sonarr"]["language_profile"] else check_for_attribute(lib, "language_profile", parent="sonarr", default_is_none=True, save=False),
|
||||
"series_type": check_for_attribute(lib, "series_type", parent="sonarr", test_list=sonarr.series_type_descriptions, default=self.general["sonarr"]["series_type"], save=False),
|
||||
"season_folder": check_for_attribute(lib, "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False),
|
||||
"tag": check_for_attribute(lib, "tag", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False),
|
||||
"search": check_for_attribute(lib, "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False),
|
||||
"cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False)
|
||||
})
|
||||
except Failed as e:
|
||||
util.print_stacktrace()
|
||||
util.print_multiline(e, error=True)
|
||||
util.separator("Radarr Configuration", space=False, border=False)
|
||||
logger.info("")
|
||||
logger.info(f"{display_name} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
|
||||
|
||||
if self.general["tautulli"]["url"] or (lib and "tautulli" in lib):
|
||||
logger.info("")
|
||||
util.separator("Tautulli Configuration", space=False, border=False)
|
||||
logger.info("")
|
||||
logger.info(f"Connecting to {display_name} library's Tautulli...")
|
||||
logger.info("")
|
||||
try:
|
||||
library.Tautulli = Tautulli(self, {
|
||||
"url": check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False),
|
||||
"apikey": check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
|
||||
})
|
||||
except Failed as e:
|
||||
util.print_stacktrace()
|
||||
util.print_multiline(e, error=True)
|
||||
logger.info(f"Connecting to {display_name} library's Radarr...")
|
||||
logger.info("")
|
||||
logger.info(f"{display_name} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
|
||||
try:
|
||||
library.Radarr = Radarr(self, {
|
||||
"url": check_for_attribute(lib, "url", parent="radarr", var_type="url", default=self.general["radarr"]["url"], req_default=True, save=False),
|
||||
"token": check_for_attribute(lib, "token", parent="radarr", default=self.general["radarr"]["token"], req_default=True, save=False),
|
||||
"add": check_for_attribute(lib, "add", parent="radarr", var_type="bool", default=self.general["radarr"]["add"], save=False),
|
||||
"add_existing": check_for_attribute(lib, "add_existing", parent="radarr", var_type="bool", default=self.general["radarr"]["add_existing"], save=False),
|
||||
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="radarr", default=self.general["radarr"]["root_folder_path"], req_default=True, save=False),
|
||||
"monitor": check_for_attribute(lib, "monitor", parent="radarr", var_type="bool", default=self.general["radarr"]["monitor"], save=False),
|
||||
"availability": check_for_attribute(lib, "availability", parent="radarr", test_list=radarr.availability_descriptions, default=self.general["radarr"]["availability"], save=False),
|
||||
"quality_profile": check_for_attribute(lib, "quality_profile", parent="radarr",default=self.general["radarr"]["quality_profile"], req_default=True, save=False),
|
||||
"tag": check_for_attribute(lib, "tag", parent="radarr", var_type="lower_list", default=self.general["radarr"]["tag"], default_is_none=True, save=False),
|
||||
"search": check_for_attribute(lib, "search", parent="radarr", var_type="bool", default=self.general["radarr"]["search"], save=False)
|
||||
})
|
||||
except Failed as e:
|
||||
self.errors.append(e)
|
||||
util.print_stacktrace()
|
||||
util.print_multiline(e, error=True)
|
||||
logger.info("")
|
||||
logger.info(f"{display_name} library's Radarr Connection {'Failed' if library.Radarr is None else 'Successful'}")
|
||||
|
||||
logger.info("")
|
||||
self.libraries.append(library)
|
||||
if self.general["sonarr"]["url"] or (lib and "sonarr" in lib):
|
||||
logger.info("")
|
||||
util.separator("Sonarr Configuration", space=False, border=False)
|
||||
logger.info("")
|
||||
logger.info(f"Connecting to {display_name} library's Sonarr...")
|
||||
logger.info("")
|
||||
try:
|
||||
library.Sonarr = Sonarr(self, {
|
||||
"url": check_for_attribute(lib, "url", parent="sonarr", var_type="url", default=self.general["sonarr"]["url"], req_default=True, save=False),
|
||||
"token": check_for_attribute(lib, "token", parent="sonarr", default=self.general["sonarr"]["token"], req_default=True, save=False),
|
||||
"add": check_for_attribute(lib, "add", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add"], save=False),
|
||||
"add_existing": check_for_attribute(lib, "add_existing", parent="sonarr", var_type="bool", default=self.general["sonarr"]["add_existing"], save=False),
|
||||
"root_folder_path": check_for_attribute(lib, "root_folder_path", parent="sonarr", default=self.general["sonarr"]["root_folder_path"], req_default=True, save=False),
|
||||
"monitor": check_for_attribute(lib, "monitor", parent="sonarr", test_list=sonarr.monitor_descriptions, default=self.general["sonarr"]["monitor"], save=False),
|
||||
"quality_profile": check_for_attribute(lib, "quality_profile", parent="sonarr", default=self.general["sonarr"]["quality_profile"], req_default=True, save=False),
|
||||
"language_profile": check_for_attribute(lib, "language_profile", parent="sonarr", default=self.general["sonarr"]["language_profile"], save=False) if self.general["sonarr"]["language_profile"] else check_for_attribute(lib, "language_profile", parent="sonarr", default_is_none=True, save=False),
|
||||
"series_type": check_for_attribute(lib, "series_type", parent="sonarr", test_list=sonarr.series_type_descriptions, default=self.general["sonarr"]["series_type"], save=False),
|
||||
"season_folder": check_for_attribute(lib, "season_folder", parent="sonarr", var_type="bool", default=self.general["sonarr"]["season_folder"], save=False),
|
||||
"tag": check_for_attribute(lib, "tag", parent="sonarr", var_type="lower_list", default=self.general["sonarr"]["tag"], default_is_none=True, save=False),
|
||||
"search": check_for_attribute(lib, "search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["search"], save=False),
|
||||
"cutoff_search": check_for_attribute(lib, "cutoff_search", parent="sonarr", var_type="bool", default=self.general["sonarr"]["cutoff_search"], save=False)
|
||||
})
|
||||
except Failed as e:
|
||||
self.errors.append(e)
|
||||
util.print_stacktrace()
|
||||
util.print_multiline(e, error=True)
|
||||
logger.info("")
|
||||
logger.info(f"{display_name} library's Sonarr Connection {'Failed' if library.Sonarr is None else 'Successful'}")
|
||||
|
||||
util.separator()
|
||||
if self.general["tautulli"]["url"] or (lib and "tautulli" in lib):
|
||||
logger.info("")
|
||||
util.separator("Tautulli Configuration", space=False, border=False)
|
||||
logger.info("")
|
||||
logger.info(f"Connecting to {display_name} library's Tautulli...")
|
||||
logger.info("")
|
||||
try:
|
||||
library.Tautulli = Tautulli(self, {
|
||||
"url": check_for_attribute(lib, "url", parent="tautulli", var_type="url", default=self.general["tautulli"]["url"], req_default=True, save=False),
|
||||
"apikey": check_for_attribute(lib, "apikey", parent="tautulli", default=self.general["tautulli"]["apikey"], req_default=True, save=False)
|
||||
})
|
||||
except Failed as e:
|
||||
self.errors.append(e)
|
||||
util.print_stacktrace()
|
||||
util.print_multiline(e, error=True)
|
||||
logger.info("")
|
||||
logger.info(f"{display_name} library's Tautulli Connection {'Failed' if library.Tautulli is None else 'Successful'}")
|
||||
|
||||
if len(self.libraries) > 0:
|
||||
logger.info(f"{len(self.libraries)} Plex Library Connection{'s' if len(self.libraries) > 1 else ''} Successful")
|
||||
else:
|
||||
raise Failed("Plex Error: No Plex libraries were connected to")
|
||||
library.Webhooks = Webhooks(self, {"error_webhooks": library.error_webhooks}, library=library, notifiarr=self.NotifiarrFactory)
|
||||
|
||||
util.separator()
|
||||
logger.info("")
|
||||
self.libraries.append(library)
|
||||
|
||||
util.separator()
|
||||
|
||||
if len(self.libraries) > 0:
|
||||
logger.info(f"{len(self.libraries)} Plex Library Connection{'s' if len(self.libraries) > 1 else ''} Successful")
|
||||
else:
|
||||
raise Failed("Plex Error: No Plex libraries were connected to")
|
||||
|
||||
util.separator()
|
||||
|
||||
if self.errors:
|
||||
self.notify(self.errors)
|
||||
except Exception as e:
|
||||
self.notify(e)
|
||||
raise
|
||||
|
||||
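Wrapping the whole constructor so any exception is pushed through notify() and then re-raised delivers the error webhook without swallowing the failure. The shape of that pattern:

    def send_error_webhook(message):
        print(f"would deliver: {message}")  # stand-in for Webhooks.error_hooks

    def build_config():
        try:
            raise RuntimeError("Config Error: something failed during setup")
        except Exception as e:
            send_error_webhook(e)
            raise  # re-raise so the caller still sees the original failure

    try:
        build_config()
    except RuntimeError:
        pass  # the error was already reported through the webhook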
def notify(self, text, library=None, collection=None, critical=True):
for error in util.get_list(text, split=False):
self.Webhooks.error_hooks(error, library=library, collection=collection, critical=critical)

def get_html(self, url, headers=None, params=None):
return html.fromstring(self.get(url, headers=headers, params=params).content)

def get_json(self, url, headers=None):
return self.get(url, headers=headers).json()
def get_json(self, url, json=None, headers=None, params=None):
return self.get(url, json=json, headers=headers, params=params).json()

@retry(stop_max_attempt_number=6, wait_fixed=10000)
def get(self, url, headers=None, params=None):
return self.session.get(url, headers=headers, params=params)
def get(self, url, json=None, headers=None, params=None):
return self.session.get(url, json=json, headers=headers, params=params)

def get_image_encoded(self, url):
return base64.b64encode(self.get(url).content).decode('utf-8')

def post_html(self, url, data=None, json=None, headers=None):
return html.fromstring(self.post(url, data=data, json=json, headers=headers).content)
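The @retry decorator comes from the retrying package: stop_max_attempt_number=6 and wait_fixed=10000 retry a failing GET up to six times with ten seconds between attempts, and by default any exception triggers another attempt. A standalone example of the same configuration (raise_for_status is added here so HTTP error responses are retried too):

    import requests
    from retrying import retry

    @retry(stop_max_attempt_number=6, wait_fixed=10000)
    def fetch(url):
        response = requests.get(url, timeout=30)
        response.raise_for_status()  # make 5xx responses count as failures worth retrying
        return response

    # fetch("https://example.com") makes up to 6 attempts, 10 seconds apart, before giving up.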

@@ -62,10 +62,10 @@ class Convert:
def myanimelist_to_ids(self, mal_ids, library):
ids = []
for mal_id in mal_ids:
if mal_id in library.mal_map:
ids.append((library.mal_map[mal_id], "ratingKey"))
elif mal_id in self.mal_to_anidb:
ids.extend(self.anidb_to_ids(self.mal_to_anidb[mal_id], library))
if int(mal_id) in library.mal_map:
ids.append((library.mal_map[int(mal_id)], "ratingKey"))
elif int(mal_id) in self.mal_to_anidb:
ids.extend(self.anidb_to_ids(self.mal_to_anidb[int(mal_id)], library))
else:
logger.error(f"Convert Error: AniDB ID not found for MyAnimeList ID: {mal_id}")
return ids
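The int(mal_id) coercion matters because IDs parsed from list sources arrive as strings while mal_map and mal_to_anidb are populated with integer keys, and a dict lookup treats "21" and 21 as different keys:

    mal_map = {21: 112233}  # keyed by int, as populated via library.mal_map[int(check_id)]
    mal_id = "21"           # IDs parsed from a list source arrive as strings

    print(mal_id in mal_map)       # False: "21" and 21 are different dict keys
    print(int(mal_id) in mal_map)  # True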
@@ -193,19 +193,23 @@ class Convert:
tvdb_id = []
imdb_id = []
anidb_id = None
guid = requests.utils.urlparse(item.guid)
item_type = guid.scheme.split(".")[-1]
check_id = guid.netloc
if self.config.Cache:
cache_id, imdb_check, media_type, expired = self.config.Cache.query_guid_map(item.guid)
if cache_id and not expired:
media_id_type = "movie" if "movie" in media_type else "show"
if item_type == "hama" and check_id.startswith("anidb"):
anidb_id = int(re.search("-(.*)", check_id).group(1))
library.anidb_map[anidb_id] = item.ratingKey
elif item_type == "myanimelist":
library.mal_map[int(check_id)] = item.ratingKey
return media_id_type, cache_id, imdb_check
try:
guid = requests.utils.urlparse(item.guid)
item_type = guid.scheme.split(".")[-1]
check_id = guid.netloc

if item_type == "plex":
try:
for guid_tag in library.get_guids(item):
for guid_tag in item.guids:
url_parsed = requests.utils.urlparse(guid_tag.id)
if url_parsed.scheme == "tvdb": tvdb_id.append(int(url_parsed.netloc))
elif url_parsed.scheme == "imdb": imdb_id.append(url_parsed.netloc)
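requests.utils.urlparse is the standard library urlparse re-exported, so an agent GUID splits cleanly into scheme and netloc; the last dotted segment of the scheme names the agent and the netloc carries its ID:

    import re
    from urllib.parse import urlparse

    guid = urlparse("com.plexapp.agents.hama://anidb-69?lang=en")
    item_type = guid.scheme.split(".")[-1]                 # 'hama'
    check_id = guid.netloc                                 # 'anidb-69'
    anidb_id = int(re.search("-(.*)", check_id).group(1))  # 69
    print(item_type, check_id, anidb_id)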
@@ -230,8 +234,8 @@ class Convert:
raise Failed(f"Hama Agent ID: {check_id} not supported")
|
||||
elif item_type == "myanimelist":
|
||||
library.mal_map[int(check_id)] = item.ratingKey
|
||||
if check_id in self.mal_to_anidb:
|
||||
anidb_id = self.mal_to_anidb[check_id]
|
||||
if int(check_id) in self.mal_to_anidb:
|
||||
anidb_id = self.mal_to_anidb[int(check_id)]
|
||||
else:
|
||||
raise Failed(f"Convert Error: AniDB ID not found for MyAnimeList ID: {check_id}")
|
||||
elif item_type == "local": raise Failed("No match in Plex")
|
||||
|
|
|
@@ -12,6 +12,8 @@ class ICheckMovies:
self.config = config

def _request(self, url, language, xpath):
if self.config.trace_mode:
logger.debug(f"URL: {url}")
return self.config.get_html(url, headers=util.header(language)).xpath(xpath)

def _parse_list(self, list_url, language):

@@ -8,17 +8,11 @@ logger = logging.getLogger("Plex Meta Manager")
builders = ["imdb_list", "imdb_id"]
|
||||
base_url = "https://www.imdb.com"
|
||||
urls = {
|
||||
"list": f"{base_url}/list/ls",
|
||||
"search": f"{base_url}/search/title/",
|
||||
"keyword": f"{base_url}/search/keyword/"
|
||||
"lists": f"{base_url}/list/ls",
|
||||
"searches": f"{base_url}/search/title/",
|
||||
"keyword_searches": f"{base_url}/search/keyword/",
|
||||
"filmography_searches": f"{base_url}/filmosearch/"
|
||||
}
|
||||
xpath = {
|
||||
"imdb_id": "//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst",
|
||||
"list": "//div[@class='desc lister-total-num-results']/text()",
|
||||
"search": "//div[@class='desc']/span/text()",
|
||||
"keyword": "//div[@class='desc']/text()"
|
||||
}
|
||||
item_counts = {"list": 100, "search": 250, "keyword": 50}
|
||||
|
||||
class IMDb:
|
||||
def __init__(self, config):
|
||||
|
@@ -31,22 +25,25 @@ class IMDb:
imdb_dict = {"url": imdb_dict}
|
||||
dict_methods = {dm.lower(): dm for dm in imdb_dict}
|
||||
imdb_url = util.parse("url", imdb_dict, methods=dict_methods, parent="imdb_list").strip()
|
||||
if not imdb_url.startswith((urls["list"], urls["search"], urls["keyword"])):
|
||||
raise Failed(f"IMDb Error: {imdb_url} must begin with either:\n{urls['list']} (For Lists)\n{urls['search']} (For Searches)\n{urls['keyword']} (For Keyword Searches)")
|
||||
if not imdb_url.startswith(tuple([v for k, v in urls.items()])):
|
||||
fails = "\n".join([f"{v} (For {k.replace('_', ' ').title()})" for k, v in urls.items()])
|
||||
raise Failed(f"IMDb Error: {imdb_url} must begin with either:{fails}")
|
||||
self._total(imdb_url, language)
|
||||
list_count = util.parse("limit", imdb_dict, datatype="int", methods=dict_methods, default=0, parent="imdb_list", minimum=0) if "limit" in dict_methods else 0
|
||||
valid_lists.append({"url": imdb_url, "limit": list_count})
|
||||
return valid_lists
|
||||
|
||||
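The rewritten check derives both the prefix tuple and the error text from the urls dict itself, so adding a new URL family such as filmography_searches needs no second edit. The same logic in isolation:

    base_url = "https://www.imdb.com"
    urls = {
        "lists": f"{base_url}/list/ls",
        "searches": f"{base_url}/search/title/",
        "keyword_searches": f"{base_url}/search/keyword/",
        "filmography_searches": f"{base_url}/filmosearch/"
    }

    imdb_url = "https://www.imdb.com/list/ls000000000/"  # placeholder list URL
    if not imdb_url.startswith(tuple(urls.values())):
        fails = "\n".join(f"{v} (For {k.replace('_', ' ').title()})" for k, v in urls.items())
        raise ValueError(f"IMDb Error: {imdb_url} must begin with either:{fails}")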
def _total(self, imdb_url, language):
headers = util.header(language)
if imdb_url.startswith(urls["keyword"]):
page_type = "keyword"
elif imdb_url.startswith(urls["list"]):
page_type = "list"
if imdb_url.startswith(urls["lists"]):
xpath_total = "//div[@class='desc lister-total-num-results']/text()"
per_page = 100
elif imdb_url.startswith(urls["searches"]):
xpath_total = "//div[@class='desc']/span/text()"
per_page = 250
else:
page_type = "search"
results = self.config.get_html(imdb_url, headers=headers).xpath(xpath[page_type])
xpath_total = "//div[@class='desc']/text()"
per_page = 50
results = self.config.get_html(imdb_url, headers=util.header(language)).xpath(xpath_total)
total = 0
for result in results:
if "title" in result:
@@ -56,8 +53,8 @@ class IMDb:
except IndexError:
pass
if total > 0:
return total, item_counts[page_type]
raise ValueError(f"IMDb Error: Failed to parse URL: {imdb_url}")
return total, per_page
raise Failed(f"IMDb Error: Failed to parse URL: {imdb_url}")

def _ids_from_url(self, imdb_url, language, limit):
total, item_count = self._total(imdb_url, language)
@@ -66,10 +63,13 @@ class IMDb:
parsed_url = urlparse(imdb_url)
params = parse_qs(parsed_url.query)
imdb_base = parsed_url._replace(query=None).geturl()
params.pop("start", None)
params.pop("count", None)
params.pop("page", None)

params.pop("start", None)  # noqa
params.pop("count", None)  # noqa
params.pop("page", None)  # noqa
if self.config.trace_mode:
logger.debug(f"URL: {imdb_base}")
logger.debug(f"Params: {params}")
search_url = imdb_base.startswith(urls["searches"])
if limit < 1 or total < limit:
limit = total
remainder = limit % item_count
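With total and the per-page size in hand, the loop below needs the page count and the size of the final partial page. The arithmetic, assuming num_of_pages is a ceiling division (that computation falls in the lines elided from this hunk):

    import math

    total = 480       # items the IMDb page reports
    item_count = 250  # per_page for a title search
    limit = 0         # 0 means take everything

    if limit < 1 or total < limit:
        limit = total
    remainder = limit % item_count
    num_of_pages = math.ceil(limit / item_count)
    print(num_of_pages, remainder)  # 2 pages; the final page keeps 230 items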
@@ -79,13 +79,14 @@ class IMDb:
for i in range(1, num_of_pages + 1):
start_num = (i - 1) * item_count + 1
util.print_return(f"Parsing Page {i}/{num_of_pages} {start_num}-{limit if i == num_of_pages else i * item_count}")
if imdb_base.startswith((urls["list"], urls["keyword"])):
params["page"] = i
if search_url:
params["count"] = remainder if i == num_of_pages else item_count  # noqa
params["start"] = start_num  # noqa
else:
params["count"] = remainder if i == num_of_pages else item_count
params["start"] = start_num
ids_found = self.config.get_html(imdb_base, headers=headers, params=params).xpath(xpath["imdb_id"])
if imdb_base.startswith((urls["list"], urls["keyword"])) and i == num_of_pages:
params["page"] = i  # noqa
response = self.config.get_html(imdb_base, headers=headers, params=params)
ids_found = response.xpath("//div[contains(@class, 'lister-item-image')]//a/img//@data-tconst")
if not search_url and i == num_of_pages:
ids_found = ids_found[:remainder]
imdb_ids.extend(ids_found)
time.sleep(2)
@@ -93,7 +94,7 @@ class IMDb:
if len(imdb_ids) > 0:
logger.debug(f"{len(imdb_ids)} IMDb IDs Found: {imdb_ids}")
return imdb_ids
raise ValueError(f"IMDb Error: No IMDb IDs Found at {imdb_url}")
raise Failed(f"IMDb Error: No IMDb IDs Found at {imdb_url}")

def get_imdb_ids(self, method, data, language):
if method == "imdb_id":

@@ -12,6 +12,8 @@ class Letterboxd:
self.config = config

def _parse_list(self, list_url, language):
if self.config.trace_mode:
logger.debug(f"URL: {list_url}")
response = self.config.get_html(list_url, headers=util.header(language))
letterboxd_ids = response.xpath("//li[contains(@class, 'poster-container')]/div/@data-film-id")
items = []
@@ -25,6 +27,8 @@ class Letterboxd:
return items

def _tmdb(self, letterboxd_url, language):
if self.config.trace_mode:
logger.debug(f"URL: {letterboxd_url}")
response = self.config.get_html(letterboxd_url, headers=util.header(language))
ids = response.xpath("//a[@data-track-action='TMDb']/@href")
if len(ids) > 0 and ids[0]:
@@ -34,6 +38,8 @@ class Letterboxd:
raise Failed(f"Letterboxd Error: TMDb Movie ID not found at {letterboxd_url}")
|
||||
|
||||
def get_list_description(self, list_url, language):
|
||||
if self.config.trace_mode:
|
||||
logger.debug(f"URL: {list_url}")
|
||||
response = self.config.get_html(list_url, headers=util.header(language))
|
||||
descriptions = response.xpath("//meta[@property='og:description']/@content")
|
||||
return descriptions[0] if len(descriptions) > 0 and len(descriptions[0]) > 0 else None
|
||||
|
|
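get_list_description reads the Open Graph description meta tag that list pages embed, via the XPath shown above; the same expression works on any HTML document carrying that tag:

    from lxml import html

    page = html.fromstring('<html><head><meta property="og:description" content="A list description."></head></html>')
    descriptions = page.xpath("//meta[@property='og:description']/@content")
    print(descriptions[0] if descriptions and descriptions[0] else None)  # A list description.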
270
modules/library.py
Normal file
@@ -0,0 +1,270 @@
import logging, os, requests, shutil, time
from abc import ABC, abstractmethod
from modules import util
from modules.meta import Metadata
from modules.util import Failed, ImageData
from PIL import Image
from ruamel import yaml

logger = logging.getLogger("Plex Meta Manager")

class Library(ABC):
def __init__(self, config, params):
self.Radarr = None
self.Sonarr = None
self.Tautulli = None
self.Webhooks = None
self.Notifiarr = None
self.collections = []
self.metadatas = []
self.metadata_files = []
self.missing = {}
self.movie_map = {}
self.show_map = {}
self.imdb_map = {}
self.anidb_map = {}
self.mal_map = {}
self.movie_rating_key_map = {}
self.show_rating_key_map = {}
self.run_again = []
self.run_sort = []
self.overlays = []
self.type = ""
self.config = config
self.name = params["name"]
self.original_mapping_name = params["mapping_name"]
self.metadata_path = params["metadata_path"]
self.asset_directory = params["asset_directory"]
self.default_dir = params["default_dir"]
self.mapping_name, output = util.validate_filename(self.original_mapping_name)
self.image_table_name = self.config.Cache.get_image_table_name(self.original_mapping_name) if self.config.Cache else None
self.missing_path = os.path.join(self.default_dir, f"{self.original_mapping_name}_missing.yml")
self.asset_folders = params["asset_folders"]
self.sync_mode = params["sync_mode"]
self.show_unmanaged = params["show_unmanaged"]
self.show_filtered = params["show_filtered"]
self.show_missing = params["show_missing"]
self.show_missing_assets = params["show_missing_assets"]
self.save_missing = params["save_missing"]
self.missing_only_released = params["missing_only_released"]
self.create_asset_folders = params["create_asset_folders"]
self.assets_for_all = params["assets_for_all"]
self.delete_unmanaged_collections = params["delete_unmanaged_collections"]
self.delete_collections_with_less = params["delete_collections_with_less"]
self.mass_genre_update = params["mass_genre_update"]
self.mass_audience_rating_update = params["mass_audience_rating_update"]
self.mass_critic_rating_update = params["mass_critic_rating_update"]
self.mass_trakt_rating_update = params["mass_trakt_rating_update"]
self.radarr_add_all = params["radarr_add_all"]
self.sonarr_add_all = params["sonarr_add_all"]
self.collection_minimum = params["collection_minimum"]
self.delete_below_minimum = params["delete_below_minimum"]
self.error_webhooks = params["error_webhooks"]
self.collection_creation_webhooks = params["collection_creation_webhooks"]
self.collection_addition_webhooks = params["collection_addition_webhooks"]
self.collection_removal_webhooks = params["collection_removal_webhooks"]
self.split_duplicates = params["split_duplicates"]  # TODO: Here or just in Plex?
self.clean_bundles = params["plex"]["clean_bundles"]  # TODO: Here or just in Plex?
self.empty_trash = params["plex"]["empty_trash"]  # TODO: Here or just in Plex?
self.optimize = params["plex"]["optimize"]  # TODO: Here or just in Plex?

metadata = []
|
||||
for file_type, metadata_file in self.metadata_path:
|
||||
if file_type == "Folder":
|
||||
if os.path.isdir(metadata_file):
|
||||
yml_files = util.glob_filter(os.path.join(metadata_file, "*.yml"))
|
||||
if yml_files:
|
||||
metadata.extend([("File", yml) for yml in yml_files])
|
||||
else:
|
||||
logger.error(f"Config Error: No YAML (.yml) files found in {metadata_file}")
|
||||
else:
|
||||
logger.error(f"Config Error: Folder not found: {metadata_file}")
|
||||
else:
|
||||
metadata.append((file_type, metadata_file))
|
||||
for file_type, metadata_file in metadata:
|
||||
try:
|
||||
meta_obj = Metadata(config, self, file_type, metadata_file)
|
||||
if meta_obj.collections:
|
||||
self.collections.extend([c for c in meta_obj.collections])
|
||||
if meta_obj.metadata:
|
||||
self.metadatas.extend([c for c in meta_obj.metadata])
|
||||
self.metadata_files.append(meta_obj)
|
||||
except Failed as e:
|
||||
util.print_multiline(e, error=True)
|
||||
|
||||
if len(self.metadata_files) == 0:
|
||||
logger.info("")
|
||||
raise Failed("Metadata File Error: No valid metadata files found")
|
||||
|
||||
if self.asset_directory:
|
||||
logger.info("")
|
||||
for ad in self.asset_directory:
|
||||
logger.info(f"Using Asset Directory: {ad}")
|
||||
|
||||
if output:
|
||||
logger.info(output)
|
||||
|
||||
def upload_images(self, item, poster=None, background=None, overlay=None):
|
||||
image = None
|
||||
image_compare = None
|
||||
poster_uploaded = False
|
||||
if self.config.Cache:
|
||||
image, image_compare = self.config.Cache.query_image_map(item.ratingKey, self.image_table_name)
|
||||
|
||||
if poster is not None:
|
||||
try:
|
||||
if image_compare and str(poster.compare) != str(image_compare):
|
||||
image = None
|
||||
if image is None or image != item.thumb:
|
||||
self._upload_image(item, poster)
|
||||
poster_uploaded = True
|
||||
logger.info(f"Detail: {poster.attribute} updated {poster.message}")
|
||||
else:
|
||||
logger.info(f"Detail: {poster.prefix}poster update not needed")
|
||||
except Failed:
|
||||
util.print_stacktrace()
|
||||
logger.error(f"Detail: {poster.attribute} failed to update {poster.message}")
|
||||
|
||||
if overlay is not None:
|
||||
overlay_name, overlay_folder, overlay_image, temp_image = overlay
|
||||
self.reload(item)
|
||||
item_labels = {item_tag.tag.lower(): item_tag.tag for item_tag in item.labels}
|
||||
for item_label in item_labels:
|
||||
if item_label.endswith(" overlay") and item_label != f"{overlay_name.lower()} overlay":
|
||||
raise Failed(f"Overlay Error: Poster already has an existing Overlay: {item_labels[item_label]}")
|
||||
if poster_uploaded or image is None or image != item.thumb or f"{overlay_name.lower()} overlay" not in item_labels:
|
||||
if not item.posterUrl:
|
||||
raise Failed(f"Overlay Error: No existing poster to Overlay for {item.title}")
|
||||
response = requests.get(item.posterUrl)
|
||||
if response.status_code >= 400:
|
||||
raise Failed(f"Overlay Error: Overlay Failed for {item.title}")
|
||||
og_image = response.content
|
||||
with open(temp_image, "wb") as handler:
|
||||
handler.write(og_image)
|
||||
shutil.copyfile(temp_image, os.path.join(overlay_folder, f"{item.ratingKey}.png"))
|
||||
while util.is_locked(temp_image):
|
||||
time.sleep(1)
|
||||
try:
|
||||
new_poster = Image.open(temp_image).convert("RGBA")
|
||||
new_poster = new_poster.resize(overlay_image.size, Image.ANTIALIAS)
|
||||
new_poster.paste(overlay_image, (0, 0), overlay_image)
|
||||
new_poster.save(temp_image)
|
||||
self.upload_file_poster(item, temp_image)
|
||||
self.edit_tags("label", item, add_tags=[f"{overlay_name} Overlay"])
|
||||
poster_uploaded = True
|
||||
logger.info(f"Detail: Overlay: {overlay_name} applied to {item.title}")
|
||||
except OSError as e:
|
||||
util.print_stacktrace()
|
||||
logger.error(f"Overlay Error: {e}")
|
||||
|
||||
background_uploaded = False
|
||||
if background is not None:
|
||||
try:
|
||||
image = None
|
||||
if self.config.Cache:
|
||||
image, image_compare = self.config.Cache.query_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds")
|
||||
if str(background.compare) != str(image_compare):
|
||||
image = None
|
||||
if image is None or image != item.art:
|
||||
self._upload_image(item, background)
|
||||
background_uploaded = True
|
||||
logger.info(f"Detail: {background.attribute} updated {background.message}")
|
||||
else:
|
||||
logger.info(f"Detail: {background.prefix}background update not needed")
|
||||
except Failed:
|
||||
util.print_stacktrace()
|
||||
logger.error(f"Detail: {background.attribute} failed to update {background.message}")
|
||||
|
||||
if self.config.Cache:
|
||||
if poster_uploaded:
|
||||
self.config.Cache.update_image_map(item.ratingKey, self.image_table_name, item.thumb, poster.compare if poster else "")
|
||||
if background_uploaded:
|
||||
self.config.Cache.update_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds", item.art, background.compare)
|
||||
|
||||
def notify(self, text, collection=None, critical=True):
|
||||
for error in util.get_list(text, split=False):
|
||||
self.Webhooks.error_hooks(error, library=self, collection=collection, critical=critical)
|
||||
|
||||
self.config.notify(text, library=self, collection=collection, critical=critical)
|
||||
|
||||
@abstractmethod
|
||||
def _upload_image(self, item, image):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def upload_file_poster(self, item, image):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def reload(self, item):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_all(self):
|
||||
pass
|
||||
|
||||
def add_missing(self, collection, items, is_movie):
|
||||
if collection not in self.missing:
|
||||
self.missing[collection] = {}
|
||||
section = "Movies Missing (TMDb IDs)" if is_movie else "Shows Missing (TVDb IDs)"
|
||||
if section not in self.missing[collection]:
|
||||
self.missing[collection][section] = {}
|
||||
for title, item_id in items:
|
||||
self.missing[collection][section][int(item_id)] = title
|
||||
with open(self.missing_path, "w"): pass
|
||||
try:
|
||||
yaml.round_trip_dump(self.missing, open(self.missing_path, "w", encoding="utf-8"))
|
||||
except yaml.scanner.ScannerError as e:
|
||||
util.print_multiline(f"YAML Error: {util.tab_new_lines(e)}", error=True)
|
||||
|
||||
def map_guids(self):
|
||||
items = self.get_all()
|
||||
logger.info(f"Mapping {self.type} Library: {self.name}")
|
||||
logger.info("")
|
||||
for i, item in enumerate(items, 1):
|
||||
util.print_return(f"Processing: {i}/{len(items)} {item.title}")
|
||||
if item.ratingKey not in self.movie_rating_key_map and item.ratingKey not in self.show_rating_key_map:
|
||||
id_type, main_id, imdb_id = self.config.Convert.get_id(item, self)
|
||||
if main_id:
|
||||
if id_type == "movie":
|
||||
self.movie_rating_key_map[item.ratingKey] = main_id[0]
|
||||
util.add_dict_list(main_id, item.ratingKey, self.movie_map)
|
||||
elif id_type == "show":
|
||||
self.show_rating_key_map[item.ratingKey] = main_id[0]
|
||||
util.add_dict_list(main_id, item.ratingKey, self.show_map)
|
||||
if imdb_id:
|
||||
util.add_dict_list(imdb_id, item.ratingKey, self.imdb_map)
|
||||
logger.info("")
|
||||
logger.info(util.adjust_space(f"Processed {len(items)} {self.type}s"))
|
||||
return items
|
||||
|
||||
def find_collection_assets(self, item, name=None, create=False):
|
||||
if name is None:
|
||||
name = item.title
|
||||
for ad in self.asset_directory:
|
||||
poster = None
|
||||
background = None
|
||||
if self.asset_folders:
|
||||
if not os.path.isdir(os.path.join(ad, name)):
|
||||
continue
|
||||
poster_filter = os.path.join(ad, name, "poster.*")
|
||||
background_filter = os.path.join(ad, name, "background.*")
|
||||
else:
|
||||
poster_filter = os.path.join(ad, f"{name}.*")
|
||||
background_filter = os.path.join(ad, f"{name}_background.*")
|
||||
matches = util.glob_filter(poster_filter)
|
||||
if len(matches) > 0:
|
||||
poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_url=False)
|
||||
matches = util.glob_filter(background_filter)
|
||||
if len(matches) > 0:
|
||||
background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_poster=False, is_url=False)
|
||||
if poster or background:
|
||||
return poster, background
|
||||
if create and self.asset_folders and not os.path.isdir(os.path.join(self.asset_directory[0], name)):
|
||||
os.makedirs(os.path.join(self.asset_directory[0], name), exist_ok=True)
|
||||
logger.info(f"Asset Directory Created: {os.path.join(self.asset_directory[0], name)}")
|
||||
return None, None
|
|
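The new Library base class above centralizes the state and helpers that previously lived in Plex's __init__ and leaves five operations abstract: _upload_image, upload_file_poster, reload, edit_tags, and get_all. A minimal sketch of the contract a server-specific subclass has to satisfy (ExampleLibrary is illustrative only, not part of the codebase):

from modules.library import Library

class ExampleLibrary(Library):
    def __init__(self, config, params):
        super().__init__(config, params)  # loads metadata files, sets up maps and params
        self.type = "Movie"

    def _upload_image(self, item, image):
        pass  # push a poster or background (URL or file) to the media server

    def upload_file_poster(self, item, image):
        pass  # upload a poster from a local file path

    def reload(self, item):
        pass  # refresh the item's state from the server

    def edit_tags(self, attr, obj, add_tags=None, remove_tags=None, sync_tags=None):
        pass  # apply tag edits such as the "<name> Overlay" label

    def get_all(self):
        return []  # every item in the library; map_guids() iterates this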
@@ -128,7 +128,11 @@ class MyAnimeList:

    def _request(self, url, authorization=None):
        new_authorization = authorization if authorization else self.authorization
+        if self.config.trace_mode:
+            logger.debug(f"URL: {url}")
        response = self.config.get_json(url, headers={"Authorization": f"Bearer {new_authorization['access_token']}"})
+        if self.config.trace_mode:
+            logger.debug(f"Response: {response}")
        if "error" in response: raise Failed(f"MyAnimeList Error: {response['error']}")
        else: return response

@@ -249,6 +249,7 @@ class Metadata:
        add_edit("originally_available", item, meta, methods, key="originallyAvailableAt", value=originally_available, var_type="date")
        add_edit("critic_rating", item, meta, methods, value=rating, key="rating", var_type="float")
        add_edit("audience_rating", item, meta, methods, key="audienceRating", var_type="float")
        add_edit("user_rating", item, meta, methods, key="userRating", var_type="float")
        add_edit("content_rating", item, meta, methods, key="contentRating")
        add_edit("original_title", item, meta, methods, key="originalTitle", value=original_title)
        add_edit("studio", item, meta, methods, value=studio)
31
modules/notifiarr.py
Normal file
@@ -0,0 +1,31 @@
import logging

from modules.util import Failed

logger = logging.getLogger("Plex Meta Manager")

base_url = "https://notifiarr.com/api/v1/"
dev_url = "https://dev.notifiarr.com/api/v1/"


class Notifiarr:
    def __init__(self, config, params):
        self.config = config
        self.apikey = params["apikey"]
        self.develop = params["develop"]
        self.test = params["test"]
        url, _ = self.get_url("user/validate/")
        response = self.config.get(url)
        response_json = response.json()
        if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
            logger.debug(f"Response: {response_json}")
            raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")
        if not params["test"] and not response_json["details"]["response"]:
            raise Failed("Notifiarr Error: Invalid apikey")

    def get_url(self, path):
        url = f"{dev_url if self.develop else base_url}{'notification/test' if self.test else f'{path}{self.apikey}'}"
        logger.debug(url.replace(self.apikey, "APIKEY"))
        params = {"event": "pmm" if self.test else "collections"}
        return url, params
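The routing in get_url() above is compact: develop picks the host, test rewrites the path to notification/test, and otherwise the API key is appended to the path. A standalone restatement for illustration (the key below is a dummy):

base_url = "https://notifiarr.com/api/v1/"
dev_url = "https://dev.notifiarr.com/api/v1/"

def get_url(path, apikey, develop=False, test=False):
    # Mirrors Notifiarr.get_url() above, outside the class for demonstration.
    url = f"{dev_url if develop else base_url}{'notification/test' if test else f'{path}{apikey}'}"
    params = {"event": "pmm" if test else "collections"}
    return url, params

print(get_url("notification/plex/", "0000000000"))
# ('https://notifiarr.com/api/v1/notification/plex/0000000000', {'event': 'collections'})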
@@ -48,6 +48,8 @@ class OMDb:
            omdb_dict, expired = self.config.Cache.query_omdb(imdb_id)
            if omdb_dict and expired is False:
                return OMDbObj(imdb_id, omdb_dict)
+        if self.config.trace_mode:
+            logger.debug(f"IMDb ID: {imdb_id}")
        response = self.config.get(base_url, params={"i": imdb_id, "apikey": self.apikey})
        if response.status_code < 400:
            omdb = OMDbObj(imdb_id, response.json())
298
modules/plex.py
@@ -1,14 +1,12 @@
-import logging, os, plexapi, requests, shutil, time
+import logging, os, plexapi, requests
from modules import builder, util
-from modules.meta import Metadata
+from modules.library import Library
from modules.util import Failed, ImageData
from plexapi import utils
from plexapi.exceptions import BadRequest, NotFound, Unauthorized
from plexapi.collection import Collection
from plexapi.server import PlexServer
-from PIL import Image
from retrying import retry
-from ruamel import yaml
from urllib import parse
from xml.etree.ElementTree import ParseError

@@ -225,9 +223,9 @@ episode_sorts = {
}
sort_types = {"movies": (1, movie_sorts), "shows": (2, show_sorts), "seasons": (3, season_sorts), "episodes": (4, episode_sorts)}

-class Plex:
+class Plex(Library):
    def __init__(self, config, params):
-        self.config = config
+        super().__init__(config, params)
        self.plex = params["plex"]
        self.url = params["plex"]["url"]
        self.token = params["plex"]["token"]
@@ -255,89 +253,6 @@ class Plex:
        self.is_other = self.agent == "com.plexapp.agents.none"
        if self.is_other:
            self.type = "Video"
-        self.collections = []
-        self.metadatas = []
-
-        self.metadata_files = []
-        metadata = []
-        for file_type, metadata_file in params["metadata_path"]:
-            if file_type == "Folder":
-                if os.path.isdir(metadata_file):
-                    yml_files = util.glob_filter(os.path.join(metadata_file, "*.yml"))
-                    if yml_files:
-                        metadata.extend([("File", yml) for yml in yml_files])
-                    else:
-                        logger.error(f"Config Error: No YAML (.yml) files found in {metadata_file}")
-                else:
-                    logger.error(f"Config Error: Folder not found: {metadata_file}")
-            else:
-                metadata.append((file_type, metadata_file))
-        for file_type, metadata_file in metadata:
-            try:
-                meta_obj = Metadata(config, self, file_type, metadata_file)
-                if meta_obj.collections:
-                    self.collections.extend([c for c in meta_obj.collections])
-                if meta_obj.metadata:
-                    self.metadatas.extend([c for c in meta_obj.metadata])
-                self.metadata_files.append(meta_obj)
-            except Failed as e:
-                util.print_multiline(e, error=True)
-
-        if len(self.metadata_files) == 0:
-            logger.info("")
-            raise Failed("Metadata File Error: No valid metadata files found")
-
-        if params["asset_directory"]:
-            logger.info("")
-            for ad in params["asset_directory"]:
-                logger.info(f"Using Asset Directory: {ad}")
-
-        self.Radarr = None
-        self.Sonarr = None
-        self.Tautulli = None
-        self.name = params["name"]
-        self.original_mapping_name = params["mapping_name"]
-        self.mapping_name, output = util.validate_filename(self.original_mapping_name)
-        if output:
-            logger.info(output)
-        self.image_table_name = self.config.Cache.get_image_table_name(self.original_mapping_name) if self.config.Cache else None
-        self.missing_path = os.path.join(params["default_dir"], f"{self.name}_missing.yml")
-        self.collection_minimum = params["collection_minimum"]
-        self.delete_below_minimum = params["delete_below_minimum"]
-        self.metadata_path = params["metadata_path"]
-        self.asset_directory = params["asset_directory"]
-        self.asset_folders = params["asset_folders"]
-        self.assets_for_all = params["assets_for_all"]
-        self.sync_mode = params["sync_mode"]
-        self.show_unmanaged = params["show_unmanaged"]
-        self.show_filtered = params["show_filtered"]
-        self.show_missing = params["show_missing"]
-        self.save_missing = params["save_missing"]
-        self.missing_only_released = params["missing_only_released"]
-        self.create_asset_folders = params["create_asset_folders"]
-        self.mass_genre_update = params["mass_genre_update"]
-        self.mass_audience_rating_update = params["mass_audience_rating_update"]
-        self.mass_critic_rating_update = params["mass_critic_rating_update"]
-        self.mass_trakt_rating_update = params["mass_trakt_rating_update"]
-        self.split_duplicates = params["split_duplicates"]
-        self.radarr_add_all = params["radarr_add_all"]
-        self.sonarr_add_all = params["sonarr_add_all"]
-        self.mass_update = self.mass_genre_update or self.mass_audience_rating_update or self.mass_critic_rating_update \
-                           or self.mass_trakt_rating_update or self.split_duplicates or self.radarr_add_all or self.sonarr_add_all
-        self.clean_bundles = params["plex"]["clean_bundles"]
-        self.empty_trash = params["plex"]["empty_trash"]
-        self.optimize = params["plex"]["optimize"]
-        self.missing = {}
-        self.movie_map = {}
-        self.show_map = {}
-        self.imdb_map = {}
-        self.anidb_map = {}
-        self.mal_map = {}
-        self.movie_rating_key_map = {}
-        self.show_rating_key_map = {}
-        self.run_again = []
-        self.run_sort = []
-        self.overlays = []

    def get_all_collections(self):
        return self.search(libtype="collection")
@@ -364,7 +279,7 @@ class Plex:

    def get_all(self):
        logger.info(f"Loading All {self.type}s from Library: {self.name}")
-        key = f"/library/sections/{self.Plex.key}/all?type={utils.searchType(self.Plex.TYPE)}"
+        key = f"/library/sections/{self.Plex.key}/all?includeGuids=1&type={utils.searchType(self.Plex.TYPE)}"
        container_start = 0
        container_size = plexapi.X_PLEX_CONTAINER_SIZE
        results = []
@@ -387,6 +302,13 @@ class Plex:
    def query_data(self, method, data):
        return method(data)

+    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+    def query_collection(self, item, collection, locked=True, add=True):
+        if add:
+            item.addCollection(collection, locked=locked)
+        else:
+            item.removeCollection(collection, locked=locked)
+
    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def collection_mode_query(self, collection, data):
        collection.modeUpdate(mode=data)
@@ -395,11 +317,6 @@ class Plex:
    def collection_order_query(self, collection, data):
        collection.sortUpdate(sort=data)

-    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
-    def get_guids(self, item):
-        self.reload(item)
-        return item.guids
-
    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def reload(self, item):
        try:
@@ -422,98 +339,24 @@ class Plex:

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def _upload_image(self, item, image):
-        if image.is_poster and image.is_url:
-            item.uploadPoster(url=image.location)
-        elif image.is_poster:
-            item.uploadPoster(filepath=image.location)
-        elif image.is_url:
-            item.uploadArt(url=image.location)
-        else:
-            item.uploadArt(filepath=image.location)
-        self.reload(item)
+        try:
+            if image.is_poster and image.is_url:
+                item.uploadPoster(url=image.location)
+            elif image.is_poster:
+                item.uploadPoster(filepath=image.location)
+            elif image.is_url:
+                item.uploadArt(url=image.location)
+            else:
+                item.uploadArt(filepath=image.location)
+            self.reload(item)
+        except BadRequest as e:
+            raise Failed(e)

    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
    def upload_file_poster(self, item, image):
        item.uploadPoster(filepath=image)
        self.reload(item)

-    def upload_images(self, item, poster=None, background=None, overlay=None):
-        image = None
-        image_compare = None
-        poster_uploaded = False
-        if self.config.Cache:
-            image, image_compare = self.config.Cache.query_image_map(item.ratingKey, self.image_table_name)
-
-        if poster is not None:
-            try:
-                if image_compare and str(poster.compare) != str(image_compare):
-                    image = None
-                if image is None or image != item.thumb:
-                    self._upload_image(item, poster)
-                    poster_uploaded = True
-                    logger.info(f"Detail: {poster.attribute} updated {poster.message}")
-                else:
-                    logger.info(f"Detail: {poster.prefix}poster update not needed")
-            except BadRequest:
-                util.print_stacktrace()
-                logger.error(f"Detail: {poster.attribute} failed to update {poster.message}")
-
-        if overlay is not None:
-            overlay_name, overlay_folder, overlay_image, temp_image = overlay
-            self.reload(item)
-            item_labels = {item_tag.tag.lower(): item_tag.tag for item_tag in item.labels}
-            for item_label in item_labels:
-                if item_label.endswith(" overlay") and item_label != f"{overlay_name.lower()} overlay":
-                    raise Failed(f"Overlay Error: Poster already has an existing Overlay: {item_labels[item_label]}")
-            if poster_uploaded or image is None or image != item.thumb or f"{overlay_name.lower()} overlay" not in item_labels:
-                if not item.posterUrl:
-                    raise Failed(f"Overlay Error: No existing poster to Overlay for {item.title}")
-                response = requests.get(item.posterUrl)
-                if response.status_code >= 400:
-                    raise Failed(f"Overlay Error: Overlay Failed for {item.title}")
-                og_image = response.content
-                with open(temp_image, "wb") as handler:
-                    handler.write(og_image)
-                shutil.copyfile(temp_image, os.path.join(overlay_folder, f"{item.ratingKey}.png"))
-                while util.is_locked(temp_image):
-                    time.sleep(1)
-                try:
-                    new_poster = Image.open(temp_image).convert("RGBA")
-                    new_poster = new_poster.resize(overlay_image.size, Image.ANTIALIAS)
-                    new_poster.paste(overlay_image, (0, 0), overlay_image)
-                    new_poster.save(temp_image)
-                    self.upload_file_poster(item, temp_image)
-                    self.edit_tags("label", item, add_tags=[f"{overlay_name} Overlay"])
-                    poster_uploaded = True
-                    logger.info(f"Detail: Overlay: {overlay_name} applied to {item.title}")
-                except OSError as e:
-                    util.print_stacktrace()
-                    logger.error(f"Overlay Error: {e}")
-
-        background_uploaded = False
-        if background is not None:
-            try:
-                image = None
-                if self.config.Cache:
-                    image, image_compare = self.config.Cache.query_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds")
-                    if str(background.compare) != str(image_compare):
-                        image = None
-                if image is None or image != item.art:
-                    self._upload_image(item, background)
-                    background_uploaded = True
-                    logger.info(f"Detail: {background.attribute} updated {background.message}")
-                else:
-                    logger.info(f"Detail: {background.prefix}background update not needed")
-            except BadRequest:
-                util.print_stacktrace()
-                logger.error(f"Detail: {background.attribute} failed to update {background.message}")
-
-        if self.config.Cache:
-            if poster_uploaded:
-                self.config.Cache.update_image_map(item.ratingKey, self.image_table_name, item.thumb, poster.compare if poster else "")
-            if background_uploaded:
-                self.config.Cache.update_image_map(item.ratingKey, f"{self.image_table_name}_backgrounds", item.art, background.compare)
-
    @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
    def get_search_choices(self, search_name, title=True):
        final_search = search_translation[search_name] if search_name in search_translation else search_name
@@ -539,6 +382,16 @@ class Plex:
        else: method = None
        return self.Plex._server.query(key, method=method)

+    def alter_collection(self, item, collection, smart_label_collection=False, add=True):
+        if smart_label_collection:
+            self.query_data(item.addLabel if add else item.removeLabel, collection)
+        else:
+            locked = True
+            if self.agent in ["tv.plex.agents.movie", "tv.plex.agents.series"]:
+                field = next((f for f in item.fields if f.name == "collection"), None)
+                locked = field is not None
+            self.query_collection(item, collection, locked=locked, add=add)
+
    def move_item(self, collection, item, after=None):
        key = f"{collection.key}/items/{item}/move"
        if after:
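Note the lock handling in the new alter_collection() above: smart label collections are driven purely through labels, while for the newer tv.plex.agents.movie/series agents the collection field is only re-locked when Plex already reports a lock on it. Hypothetical calls, assuming library is a constructed Plex object and movie a plexapi item fetched elsewhere:

library.alter_collection(movie, "Best of 2021")                               # add, preserving lock state
library.alter_collection(movie, "Best of 2021", add=False)                    # remove
library.alter_collection(movie, "Best of 2021", smart_label_collection=True)  # adds a label instead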
@@ -681,21 +534,6 @@ class Plex:
        else:
            raise Failed("Plex Error: No Items found in Plex")

-    def add_missing(self, collection, items, is_movie):
-        col_name = collection.encode("ascii", "replace").decode()
-        if col_name not in self.missing:
-            self.missing[col_name] = {}
-        section = "Movies Missing (TMDb IDs)" if is_movie else "Shows Missing (TVDb IDs)"
-        if section not in self.missing[col_name]:
-            self.missing[col_name][section] = {}
-        for title, item_id in items:
-            self.missing[col_name][section][int(item_id)] = str(title).encode("ascii", "replace").decode()
-        with open(self.missing_path, "w"): pass
-        try:
-            yaml.round_trip_dump(self.missing, open(self.missing_path, "w"))
-        except yaml.scanner.ScannerError as e:
-            util.print_multiline(f"YAML Error: {util.tab_new_lines(e)}", error=True)
-
    def get_collection_items(self, collection, smart_label_collection):
        if smart_label_collection:
            return self.get_labeled_items(collection.title if isinstance(collection, Collection) else str(collection))
@@ -715,27 +553,6 @@ class Plex:
        name = collection.title if isinstance(collection, Collection) else str(collection)
        return name, self.get_collection_items(collection, smart_label_collection)

-    def map_guids(self):
-        items = self.get_all()
-        logger.info(f"Mapping {self.type} Library: {self.name}")
-        logger.info("")
-        for i, item in enumerate(items, 1):
-            util.print_return(f"Processing: {i}/{len(items)} {item.title}")
-            if item.ratingKey not in self.movie_rating_key_map and item.ratingKey not in self.show_rating_key_map:
-                id_type, main_id, imdb_id = self.config.Convert.get_id(item, self)
-                if main_id:
-                    if id_type == "movie":
-                        self.movie_rating_key_map[item.ratingKey] = main_id[0]
-                        util.add_dict_list(main_id, item.ratingKey, self.movie_map)
-                    elif id_type == "show":
-                        self.show_rating_key_map[item.ratingKey] = main_id[0]
-                        util.add_dict_list(main_id, item.ratingKey, self.show_map)
-                if imdb_id:
-                    util.add_dict_list(imdb_id, item.ratingKey, self.imdb_map)
-        logger.info("")
-        logger.info(util.adjust_space(f"Processed {len(items)} {self.type}s"))
-        return items
-
    def get_tmdb_from_map(self, item):
        return self.movie_rating_key_map[item.ratingKey] if item.ratingKey in self.movie_rating_key_map else None
@@ -791,7 +608,6 @@ class Plex:
    def update_item_from_assets(self, item, overlay=None, create=False):
        name = os.path.basename(os.path.dirname(str(item.locations[0])) if self.is_movie else str(item.locations[0]))
-        logger.debug(name)
        found_folder = False
        poster = None
        background = None
@@ -822,14 +638,23 @@ class Plex:
        self.upload_images(item, poster=poster, background=background, overlay=overlay)
        if self.is_show:
            for season in self.query(item.seasons):
+                season_name = f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}"
                if item_dir:
-                    season_filter = os.path.join(item_dir, f"Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*")
+                    season_poster_filter = os.path.join(item_dir, f"{season_name}.*")
+                    season_background_filter = os.path.join(item_dir, f"{season_name}_background.*")
                else:
-                    season_filter = os.path.join(ad, f"{name}_Season{'0' if season.seasonNumber < 10 else ''}{season.seasonNumber}.*")
-                matches = util.glob_filter(season_filter)
+                    season_poster_filter = os.path.join(ad, f"{name}_{season_name}.*")
+                    season_background_filter = os.path.join(ad, f"{name}_{season_name}_background.*")
+                matches = util.glob_filter(season_poster_filter)
+                season_poster = None
+                season_background = None
                if len(matches) > 0:
                    season_poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Season {season.seasonNumber}'s ", is_url=False)
-                    self.upload_images(season, poster=season_poster)
+                matches = util.glob_filter(season_background_filter)
+                if len(matches) > 0:
+                    season_background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title} Season {season.seasonNumber}'s ", is_poster=False, is_url=False)
+                if season_poster or season_background:
+                    self.upload_images(season, poster=season_poster, background=season_background)
                for episode in self.query(season.episodes):
                    if item_dir:
                        episode_filter = os.path.join(item_dir, f"{episode.seasonEpisode.upper()}.*")
@@ -846,32 +671,5 @@ class Plex:
            logger.info(f"Asset Directory Created: {os.path.join(self.asset_directory[0], name)}")
        elif not overlay and self.asset_folders and not found_folder:
            logger.error(f"Asset Warning: No asset folder found called '{name}'")
-        elif not poster and not background:
+        elif not poster and not background and self.show_missing_assets:
            logger.error(f"Asset Warning: No poster or background found in an assets folder for '{name}'")
-
-    def find_collection_assets(self, item, name=None, create=False):
-        if name is None:
-            name = item.title
-        for ad in self.asset_directory:
-            poster = None
-            background = None
-            if self.asset_folders:
-                if not os.path.isdir(os.path.join(ad, name)):
-                    continue
-                poster_filter = os.path.join(ad, name, "poster.*")
-                background_filter = os.path.join(ad, name, "background.*")
-            else:
-                poster_filter = os.path.join(ad, f"{name}.*")
-                background_filter = os.path.join(ad, f"{name}_background.*")
-            matches = util.glob_filter(poster_filter)
-            if len(matches) > 0:
-                poster = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_url=False)
-            matches = util.glob_filter(background_filter)
-            if len(matches) > 0:
-                background = ImageData("asset_directory", os.path.abspath(matches[0]), prefix=f"{item.title}'s ", is_poster=False, is_url=False)
-            if poster or background:
-                return poster, background
-        if create and self.asset_folders and not os.path.isdir(os.path.join(self.asset_directory[0], name)):
-            os.makedirs(os.path.join(self.asset_directory[0], name), exist_ok=True)
-            logger.info(f"Asset Directory Created: {os.path.join(self.asset_directory[0], name)}")
-        return None, None
@@ -17,6 +17,7 @@ class Radarr:
        self.token = params["token"]
        try:
            self.api = RadarrAPI(self.url, self.token, session=self.config.session)
+            self.api.respect_list_exclusions_when_adding()
        except ArrException as e:
            raise Failed(e)
        self.add = params["add"]
@@ -61,6 +62,8 @@ class Radarr:
        for tmdb_id in invalid:
            logger.info(f"Invalid TMDb ID | {tmdb_id}")

+        return len(added)
+
    def edit_tags(self, tmdb_ids, tags, apply_tags):
        logger.info("")
        logger.info(f"{apply_tags_translation[apply_tags].capitalize()} Radarr Tags: {tags}")
@@ -35,6 +35,7 @@ class Sonarr:
        self.token = params["token"]
        try:
            self.api = SonarrAPI(self.url, self.token, session=self.config.session)
+            self.api.respect_list_exclusions_when_adding()
        except ArrException as e:
            raise Failed(e)
        self.add = params["add"]
@@ -59,7 +60,7 @@ class Sonarr:
        monitor = monitor_translation[options["monitor"] if "monitor" in options else self.monitor]
        quality_profile = options["quality"] if "quality" in options else self.quality_profile
        language_profile = options["language"] if "language" in options else self.language_profile
-        language_profile = language_profile if self.api.v3 else 1
+        language_profile = language_profile if self.api._raw.v3 else 1
        series = options["series"] if "series" in options else self.series_type
        season = options["season"] if "season" in options else self.season_folder
        tags = options["tag"] if "tag" in options else self.tag
@@ -87,6 +88,8 @@ class Sonarr:
            logger.info("")
            logger.info(f"Invalid TVDb ID | {tvdb_id}")

+        return len(added)
+
    def edit_tags(self, tvdb_ids, tags, apply_tags):
        logger.info("")
        logger.info(f"{apply_tags_translation[apply_tags].capitalize()} Sonarr Tags: {tags}")
@@ -1,4 +1,7 @@
import logging

+from plexapi.video import Movie, Show
+
from modules import util
from modules.util import Failed
+from plexapi.exceptions import BadRequest, NotFound
@@ -40,7 +43,9 @@ class Tautulli:
        for item in items:
            if item["section_id"] == section_id and count < int(params['list_size']):
                try:
-                    library.fetchItem(int(item["rating_key"]))
+                    plex_item = library.fetchItem(int(item["rating_key"]))
+                    if not isinstance(plex_item, (Movie, Show)):
+                        raise BadRequest
                    rating_keys.append(item["rating_key"])
                except (BadRequest, NotFound):
                    new_item = library.exact_search(item["title"], year=item["year"])
@@ -65,5 +70,5 @@ class Tautulli:
        else: raise Failed(f"Tautulli Error: No Library named {library_name} in the response")

    def _request(self, url):
-        logger.debug(f"Tautulli URL: {url.replace(self.apikey, '###############')}")
+        logger.debug(f"Tautulli URL: {url.replace(self.apikey, 'APIKEY').replace(self.url, 'URL')}")
        return self.config.get_json(url)
@@ -56,9 +56,12 @@ class TMDb:
        self.TMDb = tmdbv3api.TMDb(session=self.config.session)
        self.TMDb.api_key = params["apikey"]
        self.TMDb.language = params["language"]
-        response = tmdbv3api.Configuration().info()
-        if hasattr(response, "status_message"):
-            raise Failed(f"TMDb Error: {response.status_message}")
+        try:
+            response = tmdbv3api.Configuration().info()
+            if hasattr(response, "status_message"):
+                raise Failed(f"TMDb Error: {response.status_message}")
+        except TMDbException as e:
+            raise Failed(f"TMDb Error: {e}")
        self.apikey = params["apikey"]
        self.language = params["language"]
        self.Movie = tmdbv3api.Movie()
@@ -199,6 +202,8 @@ class TMDb:
        for date_attr in discover_dates:
            if date_attr in attrs:
                attrs[date_attr] = util.validate_date(attrs[date_attr], f"tmdb_discover attribute {date_attr}", return_as="%Y-%m-%d")
+        if self.config.trace_mode:
+            logger.debug(f"Params: {attrs}")
        self.Discover.discover_movies(attrs) if is_movie else self.Discover.discover_tv_shows(attrs)
        total_pages = int(self.TMDb.total_pages)
        total_results = int(self.TMDb.total_results)
@@ -9,8 +9,10 @@ redirect_uri = "urn:ietf:wg:oauth:2.0:oob"
redirect_uri_encoded = redirect_uri.replace(":", "%3A")
base_url = "https://api.trakt.tv"
builders = [
-    "trakt_collected", "trakt_collection", "trakt_list", "trakt_list_details", "trakt_popular",
-    "trakt_recommended", "trakt_trending", "trakt_watched", "trakt_watchlist"
+    "trakt_collected_daily", "trakt_collected_weekly", "trakt_collected_monthly", "trakt_collected_yearly", "trakt_collected_all",
+    "trakt_recommended_daily", "trakt_recommended_weekly", "trakt_recommended_monthly", "trakt_recommended_yearly", "trakt_recommended_all",
+    "trakt_watched_daily", "trakt_watched_weekly", "trakt_watched_monthly", "trakt_watched_yearly", "trakt_watched_all",
+    "trakt_collection", "trakt_list", "trakt_list_details", "trakt_popular", "trakt_trending", "trakt_watchlist", "trakt_boxoffice"
]
sorts = [
    "rank", "added", "title", "released", "runtime", "popularity",
@@ -105,6 +107,8 @@ class Trakt:
        output_json = []
        pages = 1
        current = 1
+        if self.config.trace_mode:
+            logger.debug(f"URL: {base_url}{url}")
        while current <= pages:
            if pages == 1:
                response = self.config.get(f"{base_url}{url}", headers=headers)
@@ -114,6 +118,8 @@ class Trakt:
                response = self.config.get(f"{base_url}{url}?page={current}", headers=headers)
            if response.status_code == 200:
                json_data = response.json()
+                if self.config.trace_mode:
+                    logger.debug(f"Response: {json_data}")
                if isinstance(json_data, dict):
                    return json_data
                else:
@@ -148,7 +154,7 @@ class Trakt:
        for item in items:
            if typeless:
                data = item
-                current_type = None
+                current_type = item_type
            elif item_type:
                data = item[item_type]
                current_type = item_type
@@ -158,7 +164,7 @@ class Trakt:
            else:
                continue
            id_type = "tmdb" if current_type == "movie" else "tvdb"
-            if data["ids"][id_type]:
+            if id_type in data["ids"] and data["ids"][id_type]:
                final_id = data["ids"][id_type]
                if current_type == "episode":
                    final_id = f"{final_id}_{item[current_type]['season']}"
@@ -216,14 +222,15 @@ class Trakt:
    def get_trakt_ids(self, method, data, is_movie):
        pretty = method.replace("_", " ").title()
        media_type = "Movie" if is_movie else "Show"
-        if method in ["trakt_trending", "trakt_popular", "trakt_recommended", "trakt_watched", "trakt_collected"]:
-            logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
-            return self._pagenation(method[6:], data, is_movie)
-        elif method in ["trakt_collection", "trakt_watchlist"]:
+        if method in ["trakt_collection", "trakt_watchlist"]:
            logger.info(f"Processing {pretty} {media_type}s for {data}")
            return self._user_items(method[6:], data, is_movie)
        elif method == "trakt_list":
            logger.info(f"Processing {pretty}: {data}")
            return self._user_list(data)
+        elif method in builders:
+            logger.info(f"Processing {pretty}: {data} {media_type}{'' if data == 1 else 's'}")
+            terms = method.split("_")
+            return self._pagenation(f"{terms[1]}{f'/{terms[2]}' if len(terms) > 2 else ''}", data, is_movie)
        else:
            raise Failed(f"Trakt Error: Method {method} not supported")
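All the new time-windowed builders above funnel through one line of get_trakt_ids(): method.split("_") turns a name like trakt_collected_weekly into the Trakt API path collected/weekly, while two-part names such as trakt_trending stay bare. Restated for illustration:

def builder_to_path(method):
    # Same construction as in get_trakt_ids() above.
    terms = method.split("_")
    return f"{terms[1]}{f'/{terms[2]}' if len(terms) > 2 else ''}"

print(builder_to_path("trakt_collected_weekly"))  # collected/weekly
print(builder_to_path("trakt_trending"))          # trending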
@@ -13,6 +13,28 @@ urls = {
    "movies": f"{base_url}/movies/", "alt_movies": f"{alt_url}/movies/",
    "series_id": f"{base_url}/dereferrer/series/", "movie_id": f"{base_url}/dereferrer/movie/"
}
+language_translation = {
+    "ab": "abk", "aa": "aar", "af": "afr", "ak": "aka", "sq": "sqi", "am": "amh", "ar": "ara", "an": "arg", "hy": "hye",
+    "as": "asm", "av": "ava", "ae": "ave", "ay": "aym", "az": "aze", "bm": "bam", "ba": "bak", "eu": "eus", "be": "bel",
+    "bn": "ben", "bi": "bis", "bs": "bos", "br": "bre", "bg": "bul", "my": "mya", "ca": "cat", "ch": "cha", "ce": "che",
+    "ny": "nya", "zh": "zho", "cv": "chv", "kw": "cor", "co": "cos", "cr": "cre", "hr": "hrv", "cs": "ces", "da": "dan",
+    "dv": "div", "nl": "nld", "dz": "dzo", "en": "eng", "eo": "epo", "et": "est", "ee": "ewe", "fo": "fao", "fj": "fij",
+    "fi": "fin", "fr": "fra", "ff": "ful", "gl": "glg", "ka": "kat", "de": "deu", "el": "ell", "gn": "grn", "gu": "guj",
+    "ht": "hat", "ha": "hau", "he": "heb", "hz": "her", "hi": "hin", "ho": "hmo", "hu": "hun", "ia": "ina", "id": "ind",
+    "ie": "ile", "ga": "gle", "ig": "ibo", "ik": "ipk", "io": "ido", "is": "isl", "it": "ita", "iu": "iku", "ja": "jpn",
+    "jv": "jav", "kl": "kal", "kn": "kan", "kr": "kau", "ks": "kas", "kk": "kaz", "km": "khm", "ki": "kik", "rw": "kin",
+    "ky": "kir", "kv": "kom", "kg": "kon", "ko": "kor", "ku": "kur", "kj": "kua", "la": "lat", "lb": "ltz", "lg": "lug",
+    "li": "lim", "ln": "lin", "lo": "lao", "lt": "lit", "lu": "lub", "lv": "lav", "gv": "glv", "mk": "mkd", "mg": "mlg",
+    "ms": "msa", "ml": "mal", "mt": "mlt", "mi": "mri", "mr": "mar", "mh": "mah", "mn": "mon", "na": "nau", "nv": "nav",
+    "nd": "nde", "ne": "nep", "ng": "ndo", "nb": "nob", "nn": "nno", "no": "nor", "ii": "iii", "nr": "nbl", "oc": "oci",
+    "oj": "oji", "cu": "chu", "om": "orm", "or": "ori", "os": "oss", "pa": "pan", "pi": "pli", "fa": "fas", "pl": "pol",
+    "ps": "pus", "pt": "por", "qu": "que", "rm": "roh", "rn": "run", "ro": "ron", "ru": "rus", "sa": "san", "sc": "srd",
+    "sd": "snd", "se": "sme", "sm": "smo", "sg": "sag", "sr": "srp", "gd": "gla", "sn": "sna", "si": "sin", "sk": "slk",
+    "sl": "slv", "so": "som", "st": "sot", "es": "spa", "su": "sun", "sw": "swa", "ss": "ssw", "sv": "swe", "ta": "tam",
+    "te": "tel", "tg": "tgk", "th": "tha", "ti": "tir", "bo": "bod", "tk": "tuk", "tl": "tgl", "tn": "tsn", "to": "ton",
+    "tr": "tur", "ts": "tso", "tt": "tat", "tw": "twi", "ty": "tah", "ug": "uig", "uk": "ukr", "ur": "urd", "uz": "uzb",
+    "ve": "ven", "vi": "vie", "vo": "vol", "wa": "wln", "cy": "cym", "wo": "wol", "fy": "fry", "xh": "xho", "yi": "yid",
+    "yo": "yor", "za": "zha", "zu": "zul"}

class TVDbObj:
    def __init__(self, tvdb_url, language, is_movie, config):
@@ -27,6 +49,8 @@ class TVDbObj:
        else:
            raise Failed(f"TVDb Error: {self.tvdb_url} must begin with {urls['movies'] if self.is_movie else urls['series']}")

+        if self.config.trace_mode:
+            logger.debug(f"URL: {tvdb_url}")
        response = self.config.get_html(self.tvdb_url, headers=util.header(self.language))
        results = response.xpath(f"//*[text()='TheTVDB.com {self.media_type} ID']/parent::node()/span/text()")
        if len(results) > 0:
@@ -38,23 +62,27 @@ class TVDbObj:
        else:
            raise Failed(f"TVDb Error: Could not find a TVDb {self.media_type} ID at the URL {self.tvdb_url}")

-        def parse_page(xpath, fail=None, multi=False):
+        def parse_page(xpath):
            parse_results = response.xpath(xpath)
            if len(parse_results) > 0:
                parse_results = [r.strip() for r in parse_results if len(r) > 0]
-            if not multi and len(parse_results) > 0:
-                return parse_results[0]
-            elif len(parse_results) > 0:
-                return parse_results
-            elif fail is not None:
-                raise Failed(f"TVDb Error: {fail} not found from TVDb URL: {self.tvdb_url}")
-            else:
-                return None
+            return parse_results[0] if len(parse_results) > 0 else None

+        def parse_title_summary(lang=None):
+            place = "//div[@class='change_translation_text' and "
+            place += f"@data-language='{lang}']" if lang else "not(@style='display:none')]"
+            return parse_page(f"{place}/@data-title"), parse_page(f"{place}/p/text()[normalize-space()]")
+
+        self.title, self.summary = parse_title_summary(lang=self.language)
+        if not self.title and self.language in language_translation:
+            self.title, self.summary = parse_title_summary(lang=language_translation[self.language])
+        if not self.title:
+            self.title, self.summary = parse_title_summary()
+        if not self.title:
+            raise Failed(f"TVDb Error: Name not found from TVDb URL: {self.tvdb_url}")
+
-        self.title = parse_page("//div[@class='change_translation_text' and not(@style='display:none')]/@data-title", fail="Name")
        self.poster_path = parse_page("//div[@class='row hidden-xs hidden-sm']/div/img/@src")
        self.background_path = parse_page("(//h2[@class='mt-4' and text()='Backgrounds']/following::div/a/@href)[1]")
-        self.summary = parse_page("//div[@class='change_translation_text' and not(@style='display:none')]/p/text()[normalize-space()]")
        if self.is_movie:
            self.directors = parse_page("//strong[text()='Directors']/parent::li/span/a/text()[normalize-space()]")
            self.writers = parse_page("//strong[text()='Writers']/parent::li/span/a/text()[normalize-space()]")
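The rewritten title lookup above tries up to three translations in order: the configured language code, its three-letter ISO 639-2 form from language_translation (when the code is in the table), and finally the page's default visible translation; only then does it raise Failed. A plain restatement of that fallback order (resolve_title and fetch_title are illustrative stand-ins, not functions in modules/tvdb.py):

def resolve_title(language, fetch_title, language_translation):
    candidates = [language]
    if language in language_translation:
        candidates.append(language_translation[language])
    candidates.append(None)  # None = page default, like parse_title_summary() with no lang
    for lang in candidates:
        title = fetch_title(lang)
        if title:
            return title
    raise ValueError("Name not found")  # the module raises Failed instead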
@@ -84,49 +112,52 @@ class TVDbObj:
        self.imdb_id = imdb_id

class TVDb:
-    def __init__(self, config):
+    def __init__(self, config, tvdb_language):
        self.config = config
+        self.tvdb_language = tvdb_language

-    def get_item(self, language, tvdb_url, is_movie):
-        return self.get_movie(language, tvdb_url) if is_movie else self.get_series(language, tvdb_url)
+    def get_item(self, tvdb_url, is_movie):
+        return self.get_movie(tvdb_url) if is_movie else self.get_series(tvdb_url)

-    def get_series(self, language, tvdb_url):
+    def get_series(self, tvdb_url):
        try:
            tvdb_url = f"{urls['series_id']}{int(tvdb_url)}"
        except ValueError:
            pass
-        return TVDbObj(tvdb_url, language, False, self.config)
+        return TVDbObj(tvdb_url, self.tvdb_language, False, self.config)

-    def get_movie(self, language, tvdb_url):
+    def get_movie(self, tvdb_url):
        try:
            tvdb_url = f"{urls['movie_id']}{int(tvdb_url)}"
        except ValueError:
            pass
-        return TVDbObj(tvdb_url, language, True, self.config)
+        return TVDbObj(tvdb_url, self.tvdb_language, True, self.config)

-    def get_list_description(self, tvdb_url, language):
-        response = self.config.get_html(tvdb_url, headers=util.header(language))
+    def get_list_description(self, tvdb_url):
+        response = self.config.get_html(tvdb_url, headers=util.header(self.tvdb_language))
        description = response.xpath("//div[@class='block']/div[not(@style='display:none')]/p/text()")
        return description[0] if len(description) > 0 and len(description[0]) > 0 else ""

-    def _ids_from_url(self, tvdb_url, language):
+    def _ids_from_url(self, tvdb_url):
        ids = []
        tvdb_url = tvdb_url.strip()
+        if self.config.trace_mode:
+            logger.debug(f"URL: {tvdb_url}")
        if tvdb_url.startswith((urls["list"], urls["alt_list"])):
            try:
-                response = self.config.get_html(tvdb_url, headers=util.header(language))
+                response = self.config.get_html(tvdb_url, headers=util.header(self.tvdb_language))
                items = response.xpath("//div[@class='col-xs-12 col-sm-12 col-md-8 col-lg-8 col-md-pull-4']/div[@class='row']")
                for item in items:
                    title = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/text()")[0]
                    item_url = item.xpath(".//div[@class='col-xs-12 col-sm-9 mt-2']//a/@href")[0]
                    if item_url.startswith("/series/"):
                        try:
-                            ids.append((self.get_series(language, f"{base_url}{item_url}").id, "tvdb"))
+                            ids.append((self.get_series(f"{base_url}{item_url}").id, "tvdb"))
                        except Failed as e:
                            logger.error(f"{e} for series {title}")
                    elif item_url.startswith("/movies/"):
                        try:
-                            movie = self.get_movie(language, f"{base_url}{item_url}")
+                            movie = self.get_movie(f"{base_url}{item_url}")
                            if movie.tmdb_id:
                                ids.append((movie.tmdb_id, "tmdb"))
                            elif movie.imdb_id:
@@ -145,19 +176,19 @@ class TVDb:
        else:
            raise Failed(f"TVDb Error: {tvdb_url} must begin with {urls['list']}")

-    def get_tvdb_ids(self, method, data, language):
+    def get_tvdb_ids(self, method, data):
        if method == "tvdb_show":
            logger.info(f"Processing TVDb Show: {data}")
-            return [(self.get_series(language, data).id, "tvdb")]
+            return [(self.get_series(data).id, "tvdb")]
        elif method == "tvdb_movie":
            logger.info(f"Processing TVDb Movie: {data}")
-            movie = self.get_movie(language, data)
+            movie = self.get_movie(data)
            if movie.tmdb_id:
                return [(movie.tmdb_id, "tmdb")]
            elif movie.imdb_id:
                return [(movie.imdb_id, "imdb")]
        elif method == "tvdb_list":
            logger.info(f"Processing TVDb List: {data}")
-            return self._ids_from_url(data, language)
+            return self._ids_from_url(data)
        else:
            raise Failed(f"TVDb Error: Method {method} not supported")
@@ -19,6 +19,9 @@ class TimeoutExpired(Exception):
class Failed(Exception):
    pass

+class NotScheduled(Exception):
+    pass
+
class ImageData:
    def __init__(self, attribute, location, prefix="", is_poster=True, is_url=True):
        self.attribute = attribute
@@ -29,6 +32,9 @@ class ImageData:
        self.compare = location if is_url else os.stat(location).st_size
        self.message = f"{prefix}{'poster' if is_poster else 'background'} to [{'URL' if is_url else 'File'}] {location}"

+    def __str__(self):
+        return str(self.__dict__)
+
def retry_if_not_failed(exception):
    return not isinstance(exception, Failed)
@@ -103,7 +109,7 @@ def logger_input(prompt, timeout=60):
    else: raise SystemError("Input Timeout not supported on this system")

def header(language="en-US,en;q=0.5"):
-    return {"Accept-Language": language, "User-Agent": "Mozilla/5.0 x64"}
+    return {"Accept-Language": "eng" if language == "default" else language, "User-Agent": "Mozilla/5.0 x64"}

def alarm_handler(signum, frame):
    raise TimeoutExpired
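The header() change above is what makes a configured language of "default" usable, such as the tvdb_language value handed to TVDb earlier: it is mapped to a plain eng Accept-Language header (matching the three-letter codes in language_translation) instead of being sent verbatim. Behavior after the change:

print(header())           # {'Accept-Language': 'en-US,en;q=0.5', 'User-Agent': 'Mozilla/5.0 x64'}
print(header("de"))       # {'Accept-Language': 'de', 'User-Agent': 'Mozilla/5.0 x64'}
print(header("default"))  # {'Accept-Language': 'eng', 'User-Agent': 'Mozilla/5.0 x64'}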
@@ -298,7 +304,7 @@ def parse(attribute, data, datatype=None, methods=None, parent=None, default=Non
    value = data[methods[attribute]] if methods and attribute in methods else data

    if datatype == "list":
-        if methods and attribute in methods and data[methods[attribute]]:
+        if value:
            return [v for v in value if v] if isinstance(value, list) else [str(value)]
        return []
    elif datatype == "dictlist":
83
modules/webhooks.py
Normal file
@@ -0,0 +1,83 @@
import logging

from modules.util import Failed

logger = logging.getLogger("Plex Meta Manager")

class Webhooks:
    def __init__(self, config, system_webhooks, library=None, notifiarr=None):
        self.config = config
        self.error_webhooks = system_webhooks["error"] if "error" in system_webhooks else []
        self.run_start_webhooks = system_webhooks["run_start"] if "run_start" in system_webhooks else []
        self.run_end_webhooks = system_webhooks["run_end"] if "run_end" in system_webhooks else []
        self.library = library
        self.notifiarr = notifiarr

    def _request(self, webhooks, json):
        if self.config.trace_mode:
            logger.debug("")
            logger.debug(f"JSON: {json}")
        for webhook in list(set(webhooks)):
            if self.config.trace_mode:
                logger.debug(f"Webhook: {webhook}")
            if webhook == "notifiarr":
                url, params = self.notifiarr.get_url("notification/plex/")
                response = self.config.get(url, json=json, params=params)
            else:
                response = self.config.post(webhook, json=json)
            response_json = response.json()
            if self.config.trace_mode:
                logger.debug(f"Response: {response_json}")
            if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
                raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")

    def start_time_hooks(self, start_time):
        if self.run_start_webhooks:
            self._request(self.run_start_webhooks, {"start_time": start_time})

    def end_time_hooks(self, start_time, run_time, stats):
        if self.run_end_webhooks:
            self._request(self.run_end_webhooks, {
                "start_time": start_time.strftime("%Y-%m-%dT%H:%M:%SZ"),
                "run_time": run_time,
                "collections_created": stats["created"],
                "collections_modified": stats["modified"],
                "collections_deleted": stats["deleted"],
                "items_added": stats["added"],
                "items_removed": stats["removed"],
                "added_to_radarr": stats["radarr"],
                "added_to_sonarr": stats["sonarr"],
            })

    def error_hooks(self, text, library=None, collection=None, critical=True):
        if self.error_webhooks:
            json = {"error": str(text), "critical": critical}
            if library:
                json["server_name"] = library.PlexServer.friendlyName
                json["library_name"] = library.name
            if collection:
                json["collection"] = str(collection)
            self._request(self.error_webhooks, json)

    def collection_hooks(self, webhooks, collection, created=False, additions=None, removals=None):
        if self.library:
            thumb = None
            if collection.thumb and next((f for f in collection.fields if f.name == "thumb"), None):
                thumb = self.config.get_image_encoded(f"{self.library.url}{collection.thumb}?X-Plex-Token={self.library.token}")
            art = None
            if collection.art and next((f for f in collection.fields if f.name == "art"), None):
                art = self.config.get_image_encoded(f"{self.library.url}{collection.art}?X-Plex-Token={self.library.token}")
            json = {
                "server_name": self.library.PlexServer.friendlyName,
                "library_name": self.library.name,
                "type": "movie" if self.library.is_movie else "show",
                "collection": collection.title,
                "created": created,
                "poster": thumb,
                "background": art
            }
            if additions:
                json["additions"] = additions
            if removals:
                json["removals"] = removals
            self._request(webhooks, json)
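For reference, the run-end payload assembled by end_time_hooks() above always has the same shape; a representative body with invented values:

example_run_end_payload = {
    "start_time": "2021-10-15T03:00:00Z",
    "run_time": "1:23:45",
    "collections_created": 2,
    "collections_modified": 5,
    "collections_deleted": 0,
    "items_added": 40,
    "items_removed": 3,
    "added_to_radarr": 7,
    "added_to_sonarr": 1,
}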
@@ -2,11 +2,11 @@ import argparse, logging, os, sys, time
from datetime import datetime
from logging.handlers import RotatingFileHandler
try:
-    import schedule
+    import plexapi, schedule
    from modules import util
    from modules.builder import CollectionBuilder
    from modules.config import Config
-    from modules.util import Failed
+    from modules.util import Failed, NotScheduled
except ModuleNotFoundError:
    print("Requirements Error: Requirements are not installed")
    sys.exit(0)
@@ -17,6 +17,7 @@ if sys.version_info[0] != 3 or sys.version_info[1] < 6:

parser = argparse.ArgumentParser()
parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
+parser.add_argument("-tr", "--trace", dest="trace", help=argparse.SUPPRESS, action="store_true", default=False)
parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
parser.add_argument("-t", "--time", "--times", dest="times", help="Times to update each day use format HH:MM (Default: 03:00) (comma-separated list)", default="03:00", type=str)
parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str)
@@ -51,6 +52,7 @@ def get_arg(env_str, default, arg_bool=False, arg_int=False):

test = get_arg("PMM_TEST", args.test, arg_bool=True)
debug = get_arg("PMM_DEBUG", args.debug, arg_bool=True)
+trace = get_arg("PMM_TRACE", args.trace, arg_bool=True)
run = get_arg("PMM_RUN", args.run, arg_bool=True)
no_countdown = get_arg("PMM_NO_COUNTDOWN", args.no_countdown, arg_bool=True)
no_missing = get_arg("PMM_NO_MISSING", args.no_missing, arg_bool=True)
@@ -61,8 +63,9 @@ libraries = get_arg("PMM_LIBRARIES", args.libraries)
resume = get_arg("PMM_RESUME", args.resume)
times = get_arg("PMM_TIME", args.times)
divider = get_arg("PMM_DIVIDER", args.divider)
screen_width = get_arg("PMM_WIDTH", args.width)
screen_width = get_arg("PMM_WIDTH", args.width, arg_int=True)
config_file = get_arg("PMM_CONFIG", args.config)
stats = {}

util.separating_character = divider[0]
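The get_arg helper named in the hunk header above lets a PMM_* environment variable override the matching command-line flag, which is why screen_width now passes arg_int=True: environment values always arrive as strings and need explicit coercion. The function body is elided from this diff, so the following is only a plausible reconstruction based on the signature shown:

import os

# Plausible reconstruction of get_arg; only the signature comes from the
# diff above, the body is an assumption. Environment variables override
# the parsed CLI defaults, with opt-in bool/int coercion.
def get_arg(env_str, default, arg_bool=False, arg_int=False):
    env_value = os.environ.get(env_str)
    if env_value is None:
        return default
    if arg_bool:
        # Accept the usual truthy spellings for boolean flags.
        return env_value.strip().lower() in ("1", "true", "t", "yes", "y")
    if arg_int:
        try:
            return int(env_value)
        except ValueError:
            return default
    return env_value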
@@ -92,13 +95,21 @@ def fmt_filter(record):
    return True

cmd_handler = logging.StreamHandler()
cmd_handler.setLevel(logging.DEBUG if test or debug else logging.INFO)
cmd_handler.setLevel(logging.DEBUG if test or debug or trace else logging.INFO)

logger.addHandler(cmd_handler)

sys.excepthook = util.my_except_hook

def start(config_path, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
version = "Unknown"
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "VERSION")) as handle:
    for line in handle.readlines():
        line = line.strip()
        if len(line) > 0:
            version = line
            break

def start(attrs):
    file_logger = os.path.join(default_dir, "logs", "meta.log")
    should_roll_over = os.path.isfile(file_logger)
    file_handler = RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
@@ -108,114 +119,105 @@ def start(config_path, is_test=False, time_scheduled=None, requested_collections
        file_handler.doRollover()
    logger.addHandler(file_handler)
    util.separator()
    logger.info(util.centered(" "))
    logger.info("")
    logger.info(util.centered(" ____  _             __  __      _          __  __                                   "))
    logger.info(util.centered("|  _ \\| | _____  __ |  \\/  | ___| |_ __ _  |  \\/  | __ _ _ __   __ _  __ _  ___ _ __ "))
    logger.info(util.centered("| |_) | |/ _ \\ \\/ / | |\\/| |/ _ \\ __/ _` | | |\\/| |/ _` | '_ \\ / _` |/ _` |/ _ \\ '__|"))
    logger.info(util.centered("|  __/| |  __/>  <  | |  | |  __/ || (_| | | |  | | (_| | | | | (_| | (_| |  __/ |   "))
    logger.info(util.centered("|_|   |_|\\___/_/\\_\\ |_|  |_|\\___|\\__\\__,_| |_|  |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_|   "))
    logger.info(util.centered("                                                                      |___/           "))
    logger.info(util.centered("    Version: 1.12.2"))
    if time_scheduled: start_type = f"{time_scheduled} "
    elif is_test: start_type = "Test "
    elif requested_collections: start_type = "Collections "
    elif requested_libraries: start_type = "Libraries "
    else: start_type = ""
    logger.info(f"    Version: {version}")
    if "time" in attrs and attrs["time"]: start_type = f"{attrs['time']} "
    elif "test" in attrs and attrs["test"]: start_type = "Test "
    elif "collections" in attrs and attrs["collections"]: start_type = "Collections "
    elif "libraries" in attrs and attrs["libraries"]: start_type = "Libraries "
    else: start_type = ""
    start_time = datetime.now()
    if time_scheduled is None:
        time_scheduled = start_time.strftime("%H:%M")
    if "time" not in attrs:
        attrs["time"] = start_time.strftime("%H:%M")
    util.separator(f"Starting {start_type}Run")
    config = None
    global stats
    stats = {"created": 0, "modified": 0, "deleted": 0, "added": 0, "removed": 0, "radarr": 0, "sonarr": 0}
    try:
        config = Config(default_dir, config_path=config_path, is_test=is_test,
                        time_scheduled=time_scheduled, requested_collections=requested_collections,
                        requested_libraries=requested_libraries, resume_from=resume_from)
        update_libraries(config)
        config = Config(default_dir, attrs)
    except Exception as e:
        util.print_stacktrace()
        util.print_multiline(e, critical=True)
    else:
        try:
            update_libraries(config)
        except Exception as e:
            config.notify(e)
            util.print_stacktrace()
            util.print_multiline(e, critical=True)
    logger.info("")
    util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
    run_time = str(datetime.now() - start_time).split('.')[0]
    if config:
        config.Webhooks.end_time_hooks(start_time, run_time, stats)
    util.separator(f"Finished {start_type}Run\nRun Time: {run_time}")
    logger.removeHandler(file_handler)

def update_libraries(config):
    global stats
    for library in config.libraries:
        os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True)
        col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log")
        should_roll_over = os.path.isfile(col_file_logger)
        library_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
        util.apply_formatter(library_handler)
        if should_roll_over:
            library_handler.doRollover()
        logger.addHandler(library_handler)
        try:
            os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True)
            col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log")
            should_roll_over = os.path.isfile(col_file_logger)
            library_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
            util.apply_formatter(library_handler)
            if should_roll_over:
                library_handler.doRollover()
            logger.addHandler(library_handler)

        os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
        logger.info("")
        util.separator(f"{library.name} Library")
        items = None
        if not library.is_other:
            plexapi.server.TIMEOUT = library.timeout
            logger.info("")
            util.separator(f"Mapping {library.name} Library", space=False, border=False)
            logger.info("")
            items = library.map_guids()
        if not config.test_mode and not config.resume_from and not collection_only and library.mass_update:
            mass_metadata(config, library, items=items)
        for metadata in library.metadata_files:
            logger.info("")
            util.separator(f"Running Metadata File\n{metadata.path}")
            if not config.test_mode and not config.resume_from and not collection_only:
                try:
                    metadata.update_metadata()
                except Failed as e:
                    logger.error(e)
            collections_to_run = metadata.get_collections(config.requested_collections)
            if config.resume_from and config.resume_from not in collections_to_run:
            util.separator(f"{library.name} Library")
            items = None
            if not library.is_other:
                logger.info("")
                logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
                continue
            if collections_to_run and not library_only:
                util.separator(f"Mapping {library.name} Library", space=False, border=False)
                logger.info("")
                util.separator(f"{'Test ' if config.test_mode else ''}Collections")
                logger.removeHandler(library_handler)
                run_collection(config, library, metadata, collections_to_run)
                logger.addHandler(library_handler)
        if library.run_sort:
            logger.info("")
            util.separator(f"Sorting {library.name} Library's Collections", space=False, border=False)
            logger.info("")
            for builder in library.run_sort:
                items = library.map_guids()
            for metadata in library.metadata_files:
                logger.info("")
                util.separator(f"Sorting {builder.name} Collection", space=False, border=False)
                util.separator(f"Running Metadata File\n{metadata.path}")
                if not config.test_mode and not config.resume_from and not collection_only:
                    try:
                        metadata.update_metadata()
                    except Failed as e:
                        library.notify(e)
                        logger.error(e)
                collections_to_run = metadata.get_collections(config.requested_collections)
                if config.resume_from and config.resume_from not in collections_to_run:
                    logger.info("")
                    logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
                    continue
                if collections_to_run and not library_only:
                    logger.info("")
                    util.separator(f"{'Test ' if config.test_mode else ''}Collections")
                    logger.removeHandler(library_handler)
                    run_collection(config, library, metadata, collections_to_run)
                    logger.addHandler(library_handler)
            if library.run_sort:
                logger.info("")
                builder.sort_collection()
                util.separator(f"Sorting {library.name} Library's Collections", space=False, border=False)
                logger.info("")
                for builder in library.run_sort:
                    logger.info("")
                    util.separator(f"Sorting {builder.name} Collection", space=False, border=False)
                    logger.info("")
                    builder.sort_collection()

        if not config.test_mode and not config.requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
            logger.info("")
            util.separator(f"Other {library.name} Library Operations")
            unmanaged_collections = []
            for col in library.get_all_collections():
                if col.title not in library.collections:
                    unmanaged_collections.append(col)
            if not config.test_mode and not collection_only:
                library_operations(config, library, items=items)

            if library.show_unmanaged and not library_only:
                logger.info("")
                util.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False)
                logger.info("")
                for col in unmanaged_collections:
                    logger.info(col.title)
                logger.info("")
                logger.info(f"{len(unmanaged_collections)} Unmanaged Collections")

            if library.assets_for_all and not collection_only:
                logger.info("")
                util.separator(f"All {library.type}s Assets Check for {library.name} Library", space=False, border=False)
                logger.info("")
                for col in unmanaged_collections:
                    poster, background = library.find_collection_assets(col, create=library.create_asset_folders)
                    library.upload_images(col, poster=poster, background=background)
                for item in library.get_all():
                    library.update_item_from_assets(item, create=library.create_asset_folders)

        logger.removeHandler(library_handler)
            logger.removeHandler(library_handler)
        except Exception as e:
            library.notify(e)
            util.print_stacktrace()
            util.print_multiline(e, critical=True)

    has_run_again = False
    for library in config.libraries:
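Both start and update_libraries above reuse one logging idiom: a RotatingFileHandler created with delay=True and mode="w", rolled over manually when a previous log exists, attached for the duration of the work, and removed afterwards. A condensed, standalone sketch of that idiom under assumed placeholder paths and logger name:

import logging
import os
from logging.handlers import RotatingFileHandler

logger = logging.getLogger("Plex Meta Manager")  # assumed logger name
logger.setLevel(logging.INFO)
os.makedirs("logs", exist_ok=True)               # placeholder log directory
log_path = os.path.join("logs", "library.log")

# delay=True defers opening the file, mode="w" starts each run fresh, and a
# manual doRollover() keeps the previous run as a numbered backup.
should_roll_over = os.path.isfile(log_path)
handler = RotatingFileHandler(log_path, delay=True, mode="w", backupCount=3, encoding="utf-8")
if should_roll_over:
    handler.doRollover()
logger.addHandler(handler)
try:
    logger.info("processing library...")  # stand-in for the real work
finally:
    logger.removeHandler(handler)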
@@ -234,26 +236,32 @@ def update_libraries(config):
    util.print_end()
    for library in config.libraries:
        if library.run_again:
            col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log")
            library_handler = RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8")
            util.apply_formatter(library_handler)
            logger.addHandler(library_handler)
            library_handler.addFilter(fmt_filter)
            os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
            logger.info("")
            util.separator(f"{library.name} Library Run Again")
            logger.info("")
            library.map_guids()
            for builder in library.run_again:
            try:
                col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, f"library.log")
                library_handler = RotatingFileHandler(col_file_logger, mode="w", backupCount=3, encoding="utf-8")
                util.apply_formatter(library_handler)
                logger.addHandler(library_handler)
                library_handler.addFilter(fmt_filter)
                os.environ["PLEXAPI_PLEXAPI_TIMEOUT"] = str(library.timeout)
                logger.info("")
                util.separator(f"{builder.name} Collection")
                util.separator(f"{library.name} Library Run Again")
                logger.info("")
                try:
                    builder.run_collections_again()
                except Failed as e:
                    util.print_stacktrace()
                    util.print_multiline(e, error=True)
            logger.removeHandler(library_handler)
                library.map_guids()
                for builder in library.run_again:
                    logger.info("")
                    util.separator(f"{builder.name} Collection")
                    logger.info("")
                    try:
                        builder.run_collections_again()
                    except Failed as e:
                        library.notify(e, collection=builder.name, critical=False)
                        util.print_stacktrace()
                        util.print_multiline(e, error=True)
                logger.removeHandler(library_handler)
            except Exception as e:
                library.notify(e)
                util.print_stacktrace()
                util.print_multiline(e, critical=True)

    used_url = []
    for library in config.libraries:
@@ -266,166 +274,218 @@ def update_libraries(config):
    if library.optimize:
        library.query(library.PlexServer.library.optimize)

def mass_metadata(config, library, items=None):
def library_operations(config, library, items=None):
    logger.info("")
    util.separator(f"Mass Editing {library.type} Library: {library.name}")
    util.separator(f"{library.name} Library Operations")
    logger.info("")
    if items is None:
        items = library.get_all()

    if library.split_duplicates:
        items = library.search(**{"duplicate": True})
        for item in items:
            item.split()
            logger.info(util.adjust_space(f"{item.title[:25]:<25} | Splitting"))
    radarr_adds = []
    sonarr_adds = []
    trakt_ratings = config.Trakt.user_ratings(library.is_movie) if library.mass_trakt_rating_update else []

    for i, item in enumerate(items, 1):
        try:
            library.reload(item)
        except Failed as e:
            logger.error(e)
            continue
        util.print_return(f"Processing: {i}/{len(items)} {item.title}")
        tmdb_id = None
        tvdb_id = None
        imdb_id = None
        if config.Cache:
            t_id, i_id, guid_media_type, _ = config.Cache.query_guid_map(item.guid)
            if t_id:
                if "movie" in guid_media_type:
                    tmdb_id = t_id[0]
                else:
                    tvdb_id = t_id[0]
            if i_id:
                imdb_id = i_id[0]
        if not tmdb_id and not tvdb_id:
            tmdb_id = library.get_tmdb_from_map(item)
        if not tmdb_id and not tvdb_id and library.is_show:
            tvdb_id = library.get_tvdb_from_map(item)
    if library.assets_for_all or library.mass_genre_update or library.mass_audience_rating_update or \
            library.mass_critic_rating_update or library.mass_trakt_rating_update or library.radarr_add_all or library.sonarr_add_all:
        if items is None:
            items = library.get_all()
        radarr_adds = []
        sonarr_adds = []
        trakt_ratings = config.Trakt.user_ratings(library.is_movie) if library.mass_trakt_rating_update else []

        if library.mass_trakt_rating_update:
        for i, item in enumerate(items, 1):
            try:
                if library.is_movie and tmdb_id in trakt_ratings:
                    new_rating = trakt_ratings[tmdb_id]
                elif library.is_show and tvdb_id in trakt_ratings:
                    new_rating = trakt_ratings[tvdb_id]
                else:
                    raise Failed
                if str(item.userRating) != str(new_rating):
                    library.edit_query(item, {"userRating.value": new_rating, "userRating.locked": 1})
                    logger.info(util.adjust_space(f"{item.title[:25]:<25} | User Rating | {new_rating}"))
            except Failed:
                pass
                library.reload(item)
            except Failed as e:
                logger.error(e)
                continue
            util.print_return(f"Processing: {i}/{len(items)} {item.title}")
            if library.assets_for_all:
                library.update_item_from_assets(item, create=library.create_asset_folders)
            tmdb_id = None
            tvdb_id = None
            imdb_id = None
            if config.Cache:
                t_id, i_id, guid_media_type, _ = config.Cache.query_guid_map(item.guid)
                if t_id:
                    if "movie" in guid_media_type:
                        tmdb_id = t_id[0]
                    else:
                        tvdb_id = t_id[0]
                if i_id:
                    imdb_id = i_id[0]
            if not tmdb_id and not tvdb_id:
                tmdb_id = library.get_tmdb_from_map(item)
            if not tmdb_id and not tvdb_id and library.is_show:
                tvdb_id = library.get_tvdb_from_map(item)

        if library.Radarr and library.radarr_add_all and tmdb_id:
            radarr_adds.append(tmdb_id)
        if library.Sonarr and library.sonarr_add_all and tvdb_id:
            sonarr_adds.append(tvdb_id)

        tmdb_item = None
        if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
            if tvdb_id and not tmdb_id:
                tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id)
            if tmdb_id:
            if library.mass_trakt_rating_update:
                try:
                    tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
                except Failed as e:
                    logger.error(util.adjust_space(str(e)))
            else:
                logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}"))
                    if library.is_movie and tmdb_id in trakt_ratings:
                        new_rating = trakt_ratings[tmdb_id]
                    elif library.is_show and tvdb_id in trakt_ratings:
                        new_rating = trakt_ratings[tvdb_id]
                    else:
                        raise Failed
                    if str(item.userRating) != str(new_rating):
                        library.edit_query(item, {"userRating.value": new_rating, "userRating.locked": 1})
                        logger.info(util.adjust_space(f"{item.title[:25]:<25} | User Rating | {new_rating}"))
                except Failed:
                    pass

        omdb_item = None
        if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]:
            if config.OMDb.limit is False:
                if tmdb_id and not imdb_id:
                    imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
                elif tvdb_id and not imdb_id:
                    imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
                if imdb_id:
            if library.Radarr and library.radarr_add_all and tmdb_id:
                radarr_adds.append(tmdb_id)
            if library.Sonarr and library.sonarr_add_all and tvdb_id:
                sonarr_adds.append(tvdb_id)

            tmdb_item = None
            if library.mass_genre_update == "tmdb" or library.mass_audience_rating_update == "tmdb" or library.mass_critic_rating_update == "tmdb":
                if tvdb_id and not tmdb_id:
                    tmdb_id = config.Convert.tvdb_to_tmdb(tvdb_id)
                if tmdb_id:
                    try:
                        omdb_item = config.OMDb.get_omdb(imdb_id)
                        tmdb_item = config.TMDb.get_movie(tmdb_id) if library.is_movie else config.TMDb.get_show(tmdb_id)
                    except Failed as e:
                        logger.error(util.adjust_space(str(e)))
                    except Exception:
                        logger.error(f"IMDb ID: {imdb_id}")
                        raise
                else:
                    logger.info(util.adjust_space(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}"))
                    logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TMDb ID for Guid: {item.guid}"))

        tvdb_item = None
        if library.mass_genre_update == "tvdb":
            if tvdb_id:
            omdb_item = None
            if library.mass_genre_update in ["omdb", "imdb"] or library.mass_audience_rating_update in ["omdb", "imdb"] or library.mass_critic_rating_update in ["omdb", "imdb"]:
                if config.OMDb.limit is False:
                    if tmdb_id and not imdb_id:
                        imdb_id = config.Convert.tmdb_to_imdb(tmdb_id)
                    elif tvdb_id and not imdb_id:
                        imdb_id = config.Convert.tvdb_to_imdb(tvdb_id)
                    if imdb_id:
                        try:
                            omdb_item = config.OMDb.get_omdb(imdb_id)
                        except Failed as e:
                            logger.error(util.adjust_space(str(e)))
                        except Exception:
                            logger.error(f"IMDb ID: {imdb_id}")
                            raise
                    else:
                        logger.info(util.adjust_space(f"{item.title[:25]:<25} | No IMDb ID for Guid: {item.guid}"))

            tvdb_item = None
            if library.mass_genre_update == "tvdb":
                if tvdb_id:
                try:
                    tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie)
                except Failed as e:
                    logger.error(util.adjust_space(str(e)))
            else:
                logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}"))

        if not tmdb_item and not omdb_item and not tvdb_item:
            continue

        if library.mass_genre_update:
            try:
                    tvdb_item = config.TVDb.get_item(tvdb_id, library.is_movie)
                except Failed as e:
                    logger.error(util.adjust_space(str(e)))
            else:
                logger.info(util.adjust_space(f"{item.title[:25]:<25} | No TVDb ID for Guid: {item.guid}"))
                if tmdb_item and library.mass_genre_update == "tmdb":
                    new_genres = [genre.name for genre in tmdb_item.genres]
                elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]:
                    new_genres = omdb_item.genres
                elif tvdb_item and library.mass_genre_update == "tvdb":
                    new_genres = tvdb_item.genres
                else:
                    raise Failed
                library.edit_tags("genre", item, sync_tags=new_genres)
            except Failed:
                pass
        if library.mass_audience_rating_update:
            try:
                if tmdb_item and library.mass_audience_rating_update == "tmdb":
                    new_rating = tmdb_item.vote_average
                elif omdb_item and library.mass_audience_rating_update in ["omdb", "imdb"]:
                    new_rating = omdb_item.imdb_rating
                else:
                    raise Failed
                if new_rating is None:
                    logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found"))
                else:
                    if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating):
                        library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
                        logger.info(util.adjust_space(f"{item.title[:25]:<25} | Audience Rating | {new_rating}"))
            except Failed:
                pass
        if library.mass_critic_rating_update:
            try:
                if tmdb_item and library.mass_critic_rating_update == "tmdb":
                    new_rating = tmdb_item.vote_average
                elif omdb_item and library.mass_critic_rating_update in ["omdb", "imdb"]:
                    new_rating = omdb_item.imdb_rating
                else:
                    raise Failed
                if new_rating is None:
                    logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found"))
                else:
                    if library.mass_critic_rating_update and str(item.rating) != str(new_rating):
                        library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
                        logger.info(util.adjust_space(f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))
            except Failed:
                pass

            if not tmdb_item and not omdb_item and not tvdb_item:
                continue

            if library.mass_genre_update:
    if library.Radarr and library.radarr_add_all:
                try:
                    if tmdb_item and library.mass_genre_update == "tmdb":
                        new_genres = [genre.name for genre in tmdb_item.genres]
                    elif omdb_item and library.mass_genre_update in ["omdb", "imdb"]:
                        new_genres = omdb_item.genres
                    elif tvdb_item and library.mass_genre_update == "tvdb":
                        new_genres = tvdb_item.genres
                    else:
                        raise Failed
                    library.edit_tags("genre", item, sync_tags=new_genres)
                except Failed:
                    pass
            if library.mass_audience_rating_update:
                try:
                    if tmdb_item and library.mass_audience_rating_update == "tmdb":
                        new_rating = tmdb_item.vote_average
                    elif omdb_item and library.mass_audience_rating_update in ["omdb", "imdb"]:
                        new_rating = omdb_item.imdb_rating
                    else:
                        raise Failed
                    if new_rating is None:
                        logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found"))
                    else:
                        if library.mass_audience_rating_update and str(item.audienceRating) != str(new_rating):
                            library.edit_query(item, {"audienceRating.value": new_rating, "audienceRating.locked": 1})
                            logger.info(util.adjust_space(f"{item.title[:25]:<25} | Audience Rating | {new_rating}"))
                except Failed:
                    pass
            if library.mass_critic_rating_update:
                try:
                    if tmdb_item and library.mass_critic_rating_update == "tmdb":
                        new_rating = tmdb_item.vote_average
                    elif omdb_item and library.mass_critic_rating_update in ["omdb", "imdb"]:
                        new_rating = omdb_item.imdb_rating
                    else:
                        raise Failed
                    if new_rating is None:
                        logger.info(util.adjust_space(f"{item.title[:25]:<25} | No Rating Found"))
                    else:
                        if library.mass_critic_rating_update and str(item.rating) != str(new_rating):
                            library.edit_query(item, {"rating.value": new_rating, "rating.locked": 1})
                            logger.info(util.adjust_space(f"{item.title[:25]:<25} | Critic Rating | {new_rating}"))
                except Failed:
                    pass
            library.Radarr.add_tmdb(radarr_adds)
        except Failed as e:
            logger.error(e)

        if library.Radarr and library.radarr_add_all:
            try:
                library.Radarr.add_tmdb(radarr_adds)
            except Failed as e:
                logger.error(e)
    if library.Sonarr and library.sonarr_add_all:
        try:
            library.Sonarr.add_tvdb(sonarr_adds)
        except Failed as e:
            logger.error(e)

        if library.Sonarr and library.sonarr_add_all:
            try:
                library.Sonarr.add_tvdb(sonarr_adds)
            except Failed as e:
                logger.error(e)
    if library.delete_collections_with_less is not None or library.delete_unmanaged_collections:
        logger.info("")
        suffix = ""
        unmanaged = ""
        if library.delete_collections_with_less is not None and library.delete_collections_with_less > 0:
            suffix = f" with less then {library.delete_collections_with_less} item{'s' if library.delete_collections_with_less > 1 else ''}"
        if library.delete_unmanaged_collections:
            if library.delete_collections_with_less is None:
                unmanaged = "Unmanaged Collections "
            elif library.delete_collections_with_less > 0:
                unmanaged = "Unmanaged Collections and "
        util.separator(f"Deleting All {unmanaged}Collections{suffix}", space=False, border=False)
        logger.info("")
        unmanaged_collections = []
        for col in library.get_all_collections():
            if (library.delete_collections_with_less is not None
                    and (library.delete_collections_with_less == 0 or col.childCount < library.delete_collections_with_less)) \
                    or (col.title not in library.collections and library.delete_unmanaged_collections):
                library.query(col.delete)
                logger.info(f"{col.title} Deleted")
            elif col.title not in library.collections:
                unmanaged_collections.append(col)

    if library.show_unmanaged and len(unmanaged_collections) > 0:
        logger.info("")
        util.separator(f"Unmanaged Collections in {library.name} Library", space=False, border=False)
        logger.info("")
        for col in unmanaged_collections:
            logger.info(col.title)
        logger.info("")
        logger.info(f"{len(unmanaged_collections)} Unmanaged Collection{'s' if len(unmanaged_collections) > 1 else ''}")
    elif library.show_unmanaged:
        logger.info("")
        util.separator(f"No Unmanaged Collections in {library.name} Library", space=False, border=False)
        logger.info("")

    if library.assets_for_all and len(unmanaged_collections) > 0:
        logger.info("")
        util.separator(f"Unmanaged Collection Assets Check for {library.name} Library", space=False, border=False)
        logger.info("")
        for col in unmanaged_collections:
            poster, background = library.find_collection_assets(col, create=library.create_asset_folders)
            library.upload_images(col, poster=poster, background=background)

def run_collection(config, library, metadata, requested_collections):
    global stats
    logger.info("")
    for mapping_name, collection_attrs in requested_collections.items():
        collection_start = datetime.now()
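One pattern recurs throughout library_operations above: fetch a candidate rating, compare it as a string against the current Plex value, and only then call library.edit_query with the matching .value and .locked fields. A condensed sketch of that idea; apply_rating is a hypothetical helper and item stands in for a plexapi media object:

# Hypothetical helper condensing the compare-then-edit-and-lock pattern
# used for userRating, audienceRating, and rating above.
def apply_rating(library, item, field, new_rating):
    if new_rating is None:
        return  # nothing to apply; the real code logs "No Rating Found"
    # Compare as strings so 8.0 and "8.0" do not trigger a pointless edit.
    if str(getattr(item, field)) != str(new_rating):
        library.edit_query(item, {f"{field}.value": new_rating, f"{field}.locked": 1})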
@@ -457,7 +517,7 @@ def run_collection(config, library, metadata, requested_collections):
        collection_log_name, output_str = util.validate_filename(mapping_name)
        collection_log_folder = os.path.join(default_dir, "logs", library.mapping_name, "collections", collection_log_name)
        os.makedirs(collection_log_folder, exist_ok=True)
        col_file_logger = os.path.join(collection_log_folder, f"collection.log")
        col_file_logger = os.path.join(collection_log_folder, "collection.log")
        should_roll_over = os.path.isfile(col_file_logger)
        collection_handler = RotatingFileHandler(col_file_logger, delay=True, mode="w", backupCount=3, encoding="utf-8")
        util.apply_formatter(collection_handler)
@@ -486,6 +546,8 @@ def run_collection(config, library, metadata, requested_collections):
                logger.info("")
                util.print_multiline(builder.smart_filter_details, info=True)

            items_added = 0
            items_removed = 0
            if not builder.smart_url:
                logger.info("")
                logger.info(f"Sync Mode: {'sync' if builder.sync else 'append'}")
@@ -501,45 +563,72 @@ def run_collection(config, library, metadata, requested_collections):
                logger.info("")
                util.separator(f"Adding to {mapping_name} Collection", space=False, border=False)
                logger.info("")
                builder.add_to_collection()
                items_added = builder.add_to_collection()
                stats["added"] += items_added
                items_removed = 0
                if builder.sync:
                    items_removed = builder.sync_collection()
                    stats["removed"] += items_removed
            elif len(builder.rating_keys) < builder.minimum and builder.build_collection:
                logger.info("")
                logger.info(f"Collection minimum: {builder.minimum} not met for {mapping_name} Collection")
                logger.info("")
                if library.delete_below_minimum and builder.obj:
                logger.info(f"Collection Minimum: {builder.minimum} not met for {mapping_name} Collection")
                if builder.details["delete_below_minimum"] and builder.obj:
                    builder.delete_collection()
                    stats["deleted"] += 1
                    logger.info("")
                    logger.info(f"Collection {builder.obj.title} deleted")

            if builder.do_missing and (len(builder.missing_movies) > 0 or len(builder.missing_shows) > 0):
                if builder.details["show_missing"] is True:
                    logger.info("")
                    util.separator(f"Missing from Library", space=False, border=False)
                    logger.info("")
                builder.run_missing()
            if builder.sync and len(builder.rating_keys) > 0 and builder.build_collection:
                builder.sync_collection()
                radarr_add, sonarr_add = builder.run_missing()
                stats["radarr"] += radarr_add
                stats["sonarr"] += sonarr_add

            run_item_details = True
            if builder.build_collection:
                logger.info("")
                util.separator(f"Updating Details of {mapping_name} Collection", space=False, border=False)
                logger.info("")
                builder.update_details()
                try:
                    builder.load_collection()
                    if builder.created:
                        stats["created"] += 1
                    elif items_added > 0 or items_removed > 0:
                        stats["modified"] += 1
                except Failed:
                    util.print_stacktrace()
                    run_item_details = False
                    logger.info("")
                    util.separator("No Collection to Update", space=False, border=False)
                else:
                    builder.update_details()
            if builder.custom_sort:
                library.run_sort.append(builder)
                # builder.sort_collection()

            if builder.custom_sort:
                library.run_sort.append(builder)
                # logger.info("")
                # util.separator(f"Sorting {mapping_name} Collection", space=False, border=False)
                # logger.info("")
                # builder.sort_collection()
            builder.send_notifications()

                builder.update_item_details()
            if builder.item_details and run_item_details:
                try:
                    builder.load_collection_items()
                except Failed:
                    logger.info("")
                    util.separator("No Items Found", space=False, border=False)
                else:
                    builder.update_item_details()

            if builder.run_again and (len(builder.run_again_movies) > 0 or len(builder.run_again_shows) > 0):
                library.run_again.append(builder)

        except NotScheduled as e:
            util.print_multiline(e, info=True)
        except Failed as e:
            library.notify(e, collection=mapping_name)
            util.print_stacktrace()
            util.print_multiline(e, error=True)
        except Exception as e:
            library.notify(f"Unknown Error: {e}", collection=mapping_name)
            util.print_stacktrace()
            logger.error(f"Unknown Error: {e}")
        logger.info("")
@@ -548,7 +637,14 @@ def run_collection(config, library, metadata, requested_collections):

try:
    if run or test or collections or libraries or resume:
        start(config_file, is_test=test, requested_collections=collections, requested_libraries=libraries, resume_from=resume)
        start({
            "config_file": config_file,
            "test": test,
            "collections": collections,
            "libraries": libraries,
            "resume": resume,
            "trace": trace
        })
    else:
        times_to_run = util.get_list(times)
        valid_times = []
@@ -561,7 +657,7 @@ try:
        else:
            raise Failed(f"Argument Error: blank time argument")
        for time_to_run in valid_times:
            schedule.every().day.at(time_to_run).do(start, config_file, time_scheduled=time_to_run)
            schedule.every().day.at(time_to_run).do(start, {"config_file": config_file, "time": time_to_run, "trace": trace})
        while True:
            schedule.run_pending()
            if not no_countdown:
@@ -575,11 +671,14 @@ try:
                    if (seconds is None or new_seconds < seconds) and new_seconds > 0:
                        seconds = new_seconds
                        og_time_str = time_to_run
                hours = int(seconds // 3600)
                minutes = int((seconds % 3600) // 60)
                time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
                time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
                util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}")
                if seconds is not None:
                    hours = int(seconds // 3600)
                    minutes = int((seconds % 3600) // 60)
                    time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
                    time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
                    util.print_return(f"Current Time: {current} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(times_to_run)}")
                else:
                    logger.error(f"Time Error: {valid_times}")
            time.sleep(60)
except KeyboardInterrupt:
    util.separator("Exiting Plex Meta Manager")
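The scheduled path above registers one job per configured time and then just polls. A minimal standalone sketch of the same schedule pattern, with the job body reduced to a placeholder; only the attrs-dict call style comes from the diff:

import time

import schedule

def start(attrs):
    print(f"run triggered for {attrs['time']}")  # placeholder for the real run

# One job per configured time, each carrying its own attrs dict.
for time_to_run in ["03:00", "15:00"]:
    schedule.every().day.at(time_to_run).do(start, {"time": time_to_run})

while True:
    schedule.run_pending()
    time.sleep(60)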
@@ -1,10 +1,10 @@
PlexAPI==4.7.0
PlexAPI==4.7.2
tmdbv3api==1.7.6
arrapi==1.1.3
lxml==4.6.3
arrapi==1.2.3
lxml==4.6.4
requests==2.26.0
ruamel.yaml==0.17.10
ruamel.yaml==0.17.17
schedule==1.1.0
retrying==1.3.3
pathvalidate==2.4.1
pillow==8.3.2
pathvalidate==2.5.0
pillow==8.4.0
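The requirements bump above pins exact versions, so a mismatched environment fails in predictable ways. A small sketch, assuming Python 3.8+ for importlib.metadata, that checks a few of the pins against what is actually installed; the pin dict is copied from the list above:

from importlib.metadata import PackageNotFoundError, version

# A few of the pins from requirements.txt above.
pins = {"PlexAPI": "4.7.2", "arrapi": "1.2.3", "lxml": "4.6.4", "schedule": "1.1.0"}

for package, expected in pins.items():
    try:
        installed = version(package)
    except PackageNotFoundError:
        print(f"{package}: not installed (expected {expected})")
        continue
    status = "OK" if installed == expected else f"MISMATCH (expected {expected})"
    print(f"{package}: {installed} {status}")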