[43] update list_buffer

meisnate12 2023-04-27 23:43:26 -04:00
parent 779c32cbe7
commit 2bb5cfc69d
15 changed files with 64 additions and 69 deletions

View file

@@ -1 +1 @@
1.19.0-develop42
1.19.0-develop43

View file

@@ -1282,7 +1282,7 @@ class CollectionBuilder:
raise Failed(f"{self.Type} Error: Cannot use item_genre.remove and item_genre.sync together")
self.item_details[method_final] = util.get_list(method_data) if method_data else []
elif method_name == "item_edition":
self.item_details[method_final] = str(method_data) if method_data else ""
self.item_details[method_final] = str(method_data) if method_data else "" # noqa
elif method_name == "non_item_remove_label":
if not method_data:
raise Failed(f"{self.Type} Error: non_item_remove_label is blank")
@@ -1312,7 +1312,7 @@ class CollectionBuilder:
elif str(method_data).lower() not in options:
logger.error(f"Metadata Error: {method_data} {method_name} attribute invalid")
else:
self.item_details[method_name] = str(method_data).lower()
self.item_details[method_name] = str(method_data).lower() # noqa
def _radarr(self, method_name, method_data):
if method_name in ["radarr_add_missing", "radarr_add_existing", "radarr_upgrade_existing", "radarr_search", "radarr_monitor", "radarr_ignore_cache"]:
@@ -1670,12 +1670,12 @@ class CollectionBuilder:
"list_size": util.parse(self.Type, "list_size", dict_data, datatype="int", methods=dict_methods, default=10, parent=method_name),
"list_minimum": util.parse(self.Type, "list_minimum", dict_data, datatype="int", methods=dict_methods, default=0, parent=method_name)
}
buff = final_dict["list_size"] * 3
if self.library.Tautulli.has_section:
final_dict["list_buffer"] = 0
buff = 0
elif "list_buffer" in dict_methods:
final_dict["list_buffer"] = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=20, parent=method_name)
else:
final_dict["list_buffer"] = final_dict["list_size"] * 3
buff = util.parse(self.Type, "list_buffer", dict_data, datatype="int", methods=dict_methods, default=buff, parent=method_name)
final_dict["list_buffer"] = buff
self.builders.append((method_name, final_dict))
def _tmdb(self, method_name, method_data):
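Note: this hunk is the change the commit title refers to. The buffer is now computed once as buff, forced to 0 when the Tautulli library has its own section, and otherwise overridable through list_buffer, whose parse default moves from a hard-coded 20 to list_size * 3. A minimal sketch of the resulting logic; resolve_list_buffer and its parameters are illustrative names, not the real inline code, which uses util.parse for the override:

```python
# Illustrative sketch of the new list_buffer resolution in CollectionBuilder._tautulli.
def resolve_list_buffer(list_size, has_section, dict_methods, parse_override):
    buff = list_size * 3                     # default buffer: three times the requested list size
    if has_section:
        buff = 0                             # a sectioned Tautulli library needs no extra buffer
    elif "list_buffer" in dict_methods:
        buff = parse_override(default=buff)  # explicit list_buffer value, falling back to list_size * 3
    return buff

# e.g. resolve_list_buffer(10, False, {}, None) -> 30
```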

View file

@@ -166,12 +166,12 @@ class ConfigFile:
self.data = YAML(self.config_path).data
def replace_attr(all_data, attr, par):
def replace_attr(all_data, in_attr, par):
if "settings" not in all_data:
all_data["settings"] = {}
if par in all_data and all_data[par] and attr in all_data[par] and attr not in all_data["settings"]:
all_data["settings"][attr] = all_data[par][attr]
del all_data[par][attr]
if par in all_data and all_data[par] and in_attr in all_data[par] and in_attr not in all_data["settings"]:
all_data["settings"][in_attr] = all_data[par][in_attr]
del all_data[par][in_attr]
if "libraries" not in self.data:
self.data["libraries"] = {}
if "settings" not in self.data:
@@ -228,9 +228,9 @@ class ConfigFile:
self.data["libraries"][library]["operations"]["mass_imdb_parental_labels"] = "mild"
if "webhooks" in self.data["libraries"][library] and self.data["libraries"][library]["webhooks"] and "collection_changes" not in self.data["libraries"][library]["webhooks"]:
changes = []
def hooks(attr):
if attr in self.data["libraries"][library]["webhooks"]:
changes.extend([w for w in util.get_list(self.data["libraries"][library]["webhooks"].pop(attr), split=False) if w not in changes])
def hooks(hook_attr):
if hook_attr in self.data["libraries"][library]["webhooks"]:
changes.extend([w for w in util.get_list(self.data["libraries"][library]["webhooks"].pop(hook_attr), split=False) if w not in changes])
hooks("collection_creation")
hooks("collection_addition")
hooks("collection_removal")
@@ -251,9 +251,9 @@ class ConfigFile:
temp = self.data.pop("webhooks")
if "changes" not in temp:
changes = []
def hooks(attr):
if attr in temp:
items = util.get_list(temp.pop(attr), split=False)
def hooks(hook_attr):
if hook_attr in temp:
items = util.get_list(temp.pop(hook_attr), split=False)
if items:
changes.extend([w for w in items if w not in changes])
hooks("collection_creation")
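Note: in both hunks the helper only gains a rename (attr becomes hook_attr) to avoid shadowing an outer variable; the behavior of merging the legacy per-event webhook keys into one de-duplicated list is unchanged. A self-contained approximation, with util.get_list(..., split=False) replaced by a plain list coercion:

```python
# Approximation of the hooks() helper above; the key names are the three event keys
# visible in the hunk.
def collect_hooks(webhooks, keys=("collection_creation", "collection_addition", "collection_removal")):
    changes = []
    for hook_attr in keys:
        if hook_attr in webhooks:
            items = webhooks.pop(hook_attr)
            items = items if isinstance(items, list) else [items] if items else []
            changes.extend([w for w in items if w not in changes])
    return changes
```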
@@ -377,10 +377,10 @@ class ConfigFile:
raise Failed(f"Config Error: {attribute} attribute must be set under {parent} globally or under this specific Library")
options = ""
if test_list:
for option, description in test_list.items():
for test_option, test_description in test_list.items():
if len(options) > 0:
options = f"{options}\n"
options = f"{options} {option} ({description})"
options = f"{options} {test_option} ({test_description})"
if (default is None and not default_is_none) or throw:
if len(options) > 0:
message = message + "\n" + options
@@ -873,16 +873,13 @@ class ConfigFile:
params["reapply_overlays"] = True
if "reset_overlays" in file or "reset_overlay" in file:
attr = f"reset_overlay{'s' if 'reset_overlays' in file else ''}"
if file[attr] and not isinstance(file[attr], list):
test_list = [file[attr]]
else:
test_list = file[attr]
reset_options = file[attr] if isinstance(file[attr], list) else [file[attr]]
final_list = []
for test_item in test_list:
if test_item and test_item in reset_overlay_options:
final_list.append(test_item)
for reset_option in reset_options:
if reset_option and reset_option in reset_overlay_options:
final_list.append(reset_option)
else:
final_text = f"Config Error: reset_overlays attribute {test_item} invalid. Options: "
final_text = f"Config Error: reset_overlays attribute {reset_option} invalid. Options: "
for option, description in reset_overlay_options.items():
final_text = f"{final_text}\n {option} ({description})"
logger.error(final_text)
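Note: the refactor above collapses the if/else list coercion into one line and renames the loop variables. The underlying pattern is normalize-then-validate: coerce a scalar-or-list value to a list, keep the entries found in reset_overlay_options, and log the rest. A sketch with placeholder option names rather than the real reset_overlay_options:

```python
# Sketch of the normalize-then-validate pattern above; option names are illustrative.
reset_overlay_options = {"tmdb": "Reset to TMDb artwork", "plex": "Reset to Plex artwork"}

def parse_reset_overlays(value):
    reset_options = value if isinstance(value, list) else [value]
    final_list = []
    for reset_option in reset_options:
        if reset_option and reset_option in reset_overlay_options:
            final_list.append(reset_option)
        else:
            text = f"Config Error: reset_overlays attribute {reset_option} invalid. Options: "
            for option, description in reset_overlay_options.items():
                text = f"{text}\n    {option} ({description})"
            print(text)
    return final_list
```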

View file

@@ -123,7 +123,7 @@ class IMDb:
imdb_ids = []
parsed_url = urlparse(imdb_url)
params = parse_qs(parsed_url.query)
imdb_base = parsed_url._replace(query=None).geturl()
imdb_base = parsed_url._replace(query=None).geturl() # noqa
params.pop("start", None) # noqa
params.pop("count", None) # noqa
params.pop("page", None) # noqa
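Note: the only change here is the # noqa on the protected _replace call, but the surrounding block is a small URL-normalization step: split the configured IMDb URL into its base and query parameters, then drop the pagination keys so paging can be handled by the caller. A self-contained sketch; split_imdb_url is an illustrative name:

```python
# Sketch of the IMDb URL normalization above.
from urllib.parse import urlparse, parse_qs

def split_imdb_url(imdb_url):
    parsed_url = urlparse(imdb_url)
    params = parse_qs(parsed_url.query)
    imdb_base = parsed_url._replace(query=None).geturl()  # noqa: protected namedtuple method
    for key in ("start", "count", "page"):
        params.pop(key, None)  # pagination is re-applied by the caller
    return imdb_base, params
```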

View file

@@ -118,14 +118,14 @@ class Library(ABC):
self.status = {}
self.items_library_operation = True if self.assets_for_all or self.mass_genre_update or self.remove_title_parentheses \
or self.mass_audience_rating_update or self.mass_critic_rating_update or self.mass_user_rating_update \
or self.mass_episode_audience_rating_update or self.mass_episode_critic_rating_update or self.mass_episode_user_rating_update \
or self.mass_content_rating_update or self.mass_originally_available_update or self.mass_original_title_update\
or self.mass_imdb_parental_labels or self.mass_episode_imdb_parental_labels or self.genre_mapper or self.content_rating_mapper or self.mass_studio_update\
or self.radarr_add_all_existing or self.sonarr_add_all_existing or self.mass_poster_update or self.mass_background_update else False
or self.mass_audience_rating_update or self.mass_critic_rating_update or self.mass_user_rating_update \
or self.mass_episode_audience_rating_update or self.mass_episode_critic_rating_update or self.mass_episode_user_rating_update \
or self.mass_content_rating_update or self.mass_originally_available_update or self.mass_original_title_update\
or self.mass_imdb_parental_labels or self.mass_episode_imdb_parental_labels or self.genre_mapper or self.content_rating_mapper or self.mass_studio_update\
or self.radarr_add_all_existing or self.sonarr_add_all_existing or self.mass_poster_update or self.mass_background_update else False
self.library_operation = True if self.items_library_operation or self.delete_collections or self.mass_collection_mode \
or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.show_unmanaged or self.show_unconfigured \
or self.metadata_backup or self.update_blank_track_titles else False
or self.radarr_remove_by_tag or self.sonarr_remove_by_tag or self.show_unmanaged or self.show_unconfigured \
or self.metadata_backup or self.update_blank_track_titles else False
self.meta_operations = [i for i in [getattr(self, o) for o in operations.meta_operations] if i]
self.label_operations = True if self.assets_for_all or self.mass_imdb_parental_labels or self.mass_episode_imdb_parental_labels else False

View file

@@ -189,7 +189,7 @@ class Mdblist:
logger.info(f"Limit: {data['limit']} items")
params["limit"] = data["limit"]
parsed_url = urlparse(data["url"])
url_base = str(parsed_url._replace(query=None).geturl())
url_base = str(parsed_url._replace(query=None).geturl()) # noqa
url_base = url_base if url_base.endswith("/") else f"{url_base}/"
url_base = url_base if url_base.endswith("json/") else f"{url_base}json/"
try:
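Note: again only a # noqa is added, but the normalization around it is worth spelling out: whatever form the configured mdblist URL takes, it is reduced to its base and forced to end in json/. A sketch with a made-up example URL:

```python
# Sketch of the mdblist URL normalization above.
from urllib.parse import urlparse

def mdblist_json_url(url):
    url_base = str(urlparse(url)._replace(query=None).geturl())  # noqa
    url_base = url_base if url_base.endswith("/") else f"{url_base}/"
    return url_base if url_base.endswith("json/") else f"{url_base}json/"

# e.g. mdblist_json_url("https://mdblist.com/lists/user/example?foo=1")
#   -> "https://mdblist.com/lists/user/example/json/"
```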

View file

@@ -1,4 +1,4 @@
import math, operator, os, re, requests
import math, operator, os, re
from datetime import datetime
from modules import plex, ergast, util
from modules.util import Failed, NotScheduled, YAML
@@ -329,14 +329,14 @@ class DataFile:
default = {}
if all_init_defaults:
var_default = {replace_var(dk, variables): replace_var(dv, variables) for dk, dv in all_init_defaults.items() if dk not in variables}
for dkey, dvalue in var_default.items():
final_key = replace_var(dkey, var_default)
for d_key, d_value in var_default.items():
final_key = replace_var(d_key, var_default)
if final_key not in optional and final_key not in variables and final_key not in conditionals:
default[final_key] = dvalue
if "<<" in str(dvalue):
default[f"{final_key}_encoded"] = re.sub(r'<<(.+)>>', r'<<\1_encoded>>', dvalue)
default[final_key] = d_value
if "<<" in str(d_value):
default[f"{final_key}_encoded"] = re.sub(r'<<(.+)>>', r'<<\1_encoded>>', d_value)
else:
default[f"{final_key}_encoded"] = util.quote(dvalue)
default[f"{final_key}_encoded"] = util.quote(d_value)
if "optional" in template:
if template["optional"]:
@@ -483,7 +483,7 @@ class DataFile:
elif f"<<{var}" in str(og_txt):
final = str(og_txt).replace(f"<<{var}>>", str(actual_value)) if f"<<{var}>>" in str(og_txt) else str(og_txt)
if f"<<{var}" in final:
match = re.search(f"<<({var}([+-])(\d+))>>", final)
match = re.search(f"<<({var}([+-])(\\d+))>>", final)
if match:
try:
final = final.replace(f"<<{match.group(1)}>>", str(int(actual_value) + (int(match.group(3)) * (-1 if match.group(2) == "-" else 1))))
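Note: the fix above escapes \d properly inside the f-string, so newer Python versions stop warning about an invalid escape sequence while the regex keeps the same meaning. The block implements the template variable arithmetic, where <<var+N>> and <<var-N>> are replaced by the variable's value offset by N. A standalone sketch with illustrative names:

```python
# Sketch of the <<var+N>> / <<var-N>> arithmetic substitution shown above.
import re

def apply_offset(og_txt, var, actual_value):
    final = str(og_txt).replace(f"<<{var}>>", str(actual_value))
    match = re.search(f"<<({var}([+-])(\\d+))>>", final)
    if match:
        offset = int(match.group(3)) * (-1 if match.group(2) == "-" else 1)
        final = final.replace(f"<<{match.group(1)}>>", str(int(actual_value) + offset))
    return final

# e.g. apply_offset("Season <<num>> of <<num+1>>", "num", 3) -> "Season 3 of 4"
```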
@@ -664,7 +664,7 @@ class MetadataFile(DataFile):
raise Failed(f"Image Section Error: No styles found for section: {section_key}")
use_key = None
if f"use_{section_key}" in methods:
use_key = util.parse("Images", f"use_{section_key}", self.temp_vars, datatype="bool",methods=methods, default=False)
use_key = util.parse("Images", f"use_{section_key}", self.temp_vars, datatype="bool", methods=methods, default=False)
logger.info(f"Use {section_key}: {use_key}")
if use_key is False:
logger.trace(f"Skipped as use_{section_key} is false")
@@ -842,7 +842,7 @@ class MetadataFile(DataFile):
all_keys = {}
auto_list = {}
for i in tags:
final_title = self.config.TMDb.TMDb._iso_639_1[str(i.key)].english_name if str(i.key) in self.config.TMDb.TMDb._iso_639_1 else str(i.title)
final_title = self.config.TMDb.TMDb._iso_639_1[str(i.key)].english_name if str(i.key) in self.config.TMDb.TMDb._iso_639_1 else str(i.title) # noqa
all_keys[str(i.key)] = final_title
if all([x not in exclude for x in [final_title, str(i.title), str(i.key)]]):
auto_list[str(i.key)] = final_title
@@ -1555,7 +1555,7 @@ class MetadataFile(DataFile):
else:
values = [loc for loc in i.locations if loc]
if not values:
raise Failed(f"Plex Error: No Filepaths found for {i.title}")
raise Failed(f"Plex Error: No Filepaths found for {i.title}")
res = re.search(r'(?i)[\[{]edition-([^}\]]*)', values[0])
check = res.group(1) if res else ""
if blank_edition and not check:

View file

@@ -5,7 +5,7 @@ from modules.builder import CollectionBuilder
from modules.util import Failed, FilterFailed, NonExisting, NotScheduled
from num2words import num2words
from plexapi.exceptions import BadRequest
from plexapi.video import Movie, Show, Season, Episode
from plexapi.video import Season, Episode
from PIL import Image, ImageFilter
logger = util.logger
@@ -122,17 +122,17 @@ class Overlays:
actual = plex.attribute_translation[cache_key] if cache_key in plex.attribute_translation else cache_key
if not hasattr(item, actual):
continue
actual_value = getattr(item, actual)
if cache_value is None or actual_value is None:
real_value = getattr(item, actual)
if cache_value is None or real_value is None:
continue
if cache_key in overlay.float_vars:
cache_value = float(cache_value)
if cache_key in overlay.int_vars:
cache_value = int(cache_value)
if cache_key in overlay.date_vars:
actual_value = actual_value.strftime("%Y-%m-%d")
if actual_value != cache_value:
overlay_change = f"Special Text Changed from {cache_value} to {actual_value}"
real_value = real_value.strftime("%Y-%m-%d")
if real_value != cache_value:
overlay_change = f"Special Text Changed from {cache_value} to {real_value}"
try:
poster, background, item_dir, name = self.library.find_item_assets(item)
if not poster and self.library.assets_for_all:
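Note: the hunk above only renames actual_value to real_value. The block decides whether a cached "special text" value still matches the item's current attribute: the cached value is coerced to the attribute's type and dates are compared as YYYY-MM-DD strings. A sketch with the var sets passed in directly instead of read off the Overlay object:

```python
# Sketch of the special-text comparison above; float_vars/int_vars/date_vars mirror
# the attribute sets on the overlay.
def special_text_change(cache_key, cache_value, real_value, float_vars=(), int_vars=(), date_vars=()):
    if cache_value is None or real_value is None:
        return None
    if cache_key in float_vars:
        cache_value = float(cache_value)
    if cache_key in int_vars:
        cache_value = int(cache_value)
    if cache_key in date_vars:
        real_value = real_value.strftime("%Y-%m-%d")  # compare dates as strings
    if real_value != cache_value:
        return f"Special Text Changed from {cache_value} to {real_value}"
    return None
```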

View file

@@ -764,9 +764,9 @@ class Plex(Library):
try:
tag = next(f for f in self.Plex.listFilters(libtype) if f.filter == tag)
except StopIteration:
availableFilters = [f.filter for f in self.Plex.listFilters(libtype)]
available_filters = [f.filter for f in self.Plex.listFilters(libtype)]
raise NotFound(f'Unknown filter field "{tag}" for libtype "{libtype}". '
f'Available filters: {availableFilters}') from None
f'Available filters: {available_filters}') from None
items = self.Plex.findItems(self.Plex._server.query(tag.key), FilterChoice)
if tag.key.endswith("/collection?type=4"):
keys = [k.key for k in items]

View file

@@ -23,7 +23,7 @@ class Radarr:
try:
self.api = RadarrAPI(self.url, self.token, session=self.config.session)
self.api.respect_list_exclusions_when_adding()
self.api._validate_add_options(params["root_folder_path"], params["quality_profile"])
self.api._validate_add_options(params["root_folder_path"], params["quality_profile"]) # noqa
self.profiles = self.api.quality_profile()
except ArrException as e:
raise Failed(e)

View file

@@ -39,7 +39,7 @@ class Sonarr:
try:
self.api = SonarrAPI(self.url, self.token, session=self.config.session)
self.api.respect_list_exclusions_when_adding()
self.api._validate_add_options(params["root_folder_path"], params["quality_profile"], params["language_profile"])
self.api._validate_add_options(params["root_folder_path"], params["quality_profile"], params["language_profile"]) # noqa
self.profiles = self.api.quality_profile()
except ArrException as e:
raise Failed(e)
@@ -80,7 +80,7 @@ class Sonarr:
monitor = monitor_translation[options["monitor"] if "monitor" in options else self.monitor]
quality_profile = options["quality"] if "quality" in options else self.quality_profile
language_profile = options["language"] if "language" in options else self.language_profile
language_profile = language_profile if self.api._raw.v3 else 1
language_profile = language_profile if self.api._raw.v3 else 1 # noqa
series_type = options["series"] if "series" in options else self.series_type
season = options["season"] if "season" in options else self.season_folder
tags = options["tag"] if "tag" in options else self.tag

View file

@@ -153,9 +153,9 @@ class TMDbShow(TMDBObj):
self.type = data["type"] if isinstance(data, dict) else data.type
self.studio = data["studio"] if isinstance(data, dict) else data.networks[0].name if data.networks else None
self.tvdb_id = data["tvdb_id"] if isinstance(data, dict) else data.tvdb_id
loop = data.origin_countries if not isinstance(data, dict) else data["countries"].split("|") if data["countries"] else []
loop = data.origin_countries if not isinstance(data, dict) else data["countries"].split("|") if data["countries"] else [] # noqa
self.countries = [TMDbCountry(c) for c in loop]
loop = data.seasons if not isinstance(data, dict) else data["seasons"].split("|") if data["seasons"] else []
loop = data.seasons if not isinstance(data, dict) else data["seasons"].split("|") if data["seasons"] else [] # noqa
self.seasons = [TMDbSeason(s) for s in loop]
if self._tmdb.config.Cache and not ignore_cache:
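Note: the two lines gaining # noqa show the dual-source pattern used throughout TMDbShow: cached rows are dicts holding "|"-delimited strings, while live responses are tmdbapis objects with list attributes. A sketch of that branch; split_cached is an illustrative helper, not code from the module:

```python
# Sketch of the cached-vs-live parsing pattern above.
def split_cached(data, dict_key, attr_name):
    if isinstance(data, dict):
        return data[dict_key].split("|") if data[dict_key] else []
    return getattr(data, attr_name)

# countries = [TMDbCountry(c) for c in split_cached(data, "countries", "origin_countries")]
```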
@@ -180,7 +180,7 @@ class TMDb:
self.TMDb = TMDbAPIs(self.apikey, language=self.language, session=self.config.session)
except TMDbException as e:
raise Failed(f"TMDb Error: {e}")
self.iso_3166_1 = {iso: i.name for iso, i in self.TMDb._iso_3166_1.items()}
self.iso_3166_1 = {iso: i.name for iso, i in self.TMDb._iso_3166_1.items()} # noqa
def convert_from(self, tmdb_id, convert_to, is_movie):
item = self.get_movie(tmdb_id) if is_movie else self.get_show(tmdb_id)

View file

@@ -87,7 +87,7 @@ class TVDbObj:
released = parse_page("//strong[text()='First Aired']/parent::li/span/text()[normalize-space()]")
try:
self.release_date = datetime.strptime(released, "%B %d, %Y") if released else released
self.release_date = datetime.strptime(released, "%B %d, %Y") if released else released # noqa
except ValueError:
self.release_date = None
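Note: only a # noqa is added here; the surrounding logic parses the scraped "First Aired" text with strptime and falls back to None when the string does not match. A sketch; "%B %d, %Y" matches strings such as "April 27, 2023":

```python
# Sketch of the release-date parsing above.
from datetime import datetime

def parse_first_aired(released):
    try:
        return datetime.strptime(released, "%B %d, %Y") if released else released
    except ValueError:
        return None
```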

View file

@@ -14,7 +14,7 @@ except ModuleNotFoundError:
windows = False
logger: MyLogger = None
logger: MyLogger = None # noqa
class TimeoutExpired(Exception):
pass
@@ -308,7 +308,7 @@ def windows_input(prompt, timeout=5):
sys.stdout.write(f"| {prompt}: ")
sys.stdout.flush()
result = []
start_time = time.time()
s_time = time.time()
while True:
if msvcrt.kbhit():
char = msvcrt.getwche()
@@ -319,7 +319,7 @@ def windows_input(prompt, timeout=5):
return out
elif ord(char) >= 32: #space_char
result.append(char)
if (time.time() - start_time) > timeout:
if (time.time() - s_time) > timeout:
print("")
raise TimeoutExpired
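Note: the two hunks above only rename start_time to s_time. The function is a Windows-only timed prompt built on msvcrt: poll the keyboard, collect printable characters, return on Enter, and raise TimeoutExpired once the deadline passes. A sketch assuming Enter arrives as "\r" or "\n"; the original's Enter check sits above the displayed hunk:

```python
# Windows-only sketch of the timed prompt above; msvcrt exists only on Windows,
# hence the module-level guard in the real file.
import sys, time

class TimeoutExpired(Exception):
    pass

def windows_input(prompt, timeout=5):
    import msvcrt
    sys.stdout.write(f"| {prompt}: ")
    sys.stdout.flush()
    result = []
    s_time = time.time()
    while True:
        if msvcrt.kbhit():
            char = msvcrt.getwche()
            if char in ("\r", "\n"):   # Enter finishes the input
                print("")
                return "".join(result)
            elif ord(char) >= 32:      # collect printable characters
                result.append(char)
        if (time.time() - s_time) > timeout:
            print("")
            raise TimeoutExpired
```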
@@ -977,5 +977,3 @@ class YAML:
if self.path:
with open(self.path, 'w', encoding="utf-8") as fp:
self.yaml.dump(self.data, fp)

View file

@@ -232,9 +232,9 @@ class Webhooks:
for col in row:
section["fields"].append({"type": "mrkdwn", "text": col[0]})
section["fields"].append({"type": "plain_text", "text": col[1]})
new_json["blocks"].append(section)
new_json["blocks"].append(section) # noqa
else:
new_json["blocks"].append({"type": "divider"})
new_json["blocks"].append({"type": "divider"}) # noqa
return new_json
def discord(self, json):
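Note: the # noqa lines above sit in the Slack payload builder, which turns rows of (label, value) pairs into Block Kit "section" blocks with alternating mrkdwn and plain_text fields, and uses a "divider" block where a row is empty. A sketch of that assembly; the rows parameter and the exact branch placement are assumptions, since only part of the loop is visible in the hunk:

```python
# Sketch of the Slack Block Kit assembly above.
def build_blocks(rows):
    new_json = {"blocks": []}
    for row in rows:
        if row:
            section = {"type": "section", "fields": []}
            for col in row:
                section["fields"].append({"type": "mrkdwn", "text": col[0]})
                section["fields"].append({"type": "plain_text", "text": col[1]})
            new_json["blocks"].append(section)
        else:
            new_json["blocks"].append({"type": "divider"})
    return new_json
```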