mirror of https://github.com/meisnate12/Plex-Meta-Manager
synced 2024-11-14 00:37:22 +00:00

add ability to run multiple times per day

This commit is contained in:
parent 00b128a967
commit f28c09bf4f

5 changed files with 110 additions and 72 deletions
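The change set boils down to this: the -t/--time option (or the PMM_TIME environment variable) now accepts a comma-separated list of HH:MM values, one scheduler job is registered per value, and the triggering time is threaded through start() into Config so a collection can opt into a specific run with an hourly(HH) schedule. For example, --time 06:00,18:00 schedules two daily runs, and a collection scheduled hourly(18) would only be built by the 18:00 run. A minimal sketch of that wiring, using hypothetical values and a stubbed start() in place of the real entry point further down:

    import re, schedule

    times_to_run = ["06:00", "18:00"]        # e.g. util.get_list("06:00,18:00")

    # validate every entry, as the argument handling below does
    for time_to_run in times_to_run:
        if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", time_to_run):
            raise ValueError(f"time argument invalid: {time_to_run} must be in the HH:MM format")

    def start(config_file, time_scheduled=None):
        # stand-in for the real start() defined later in this commit
        print(f"run triggered for {time_scheduled}")

    # one scheduler job per requested time; each run knows which slot triggered it
    for time_to_run in times_to_run:
        schedule.every().day.at(time_to_run).do(start, "config.yml", time_scheduled=time_to_run)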
@@ -33,6 +33,9 @@ plex: # Can be individually specified
   url: http://192.168.1.12:32400
   token: ####################
   timeout: 60
+  clean_bundles: false
+  empty_trash: false
+  optimize: false
 tmdb:
   apikey: ################################
   language: en
@@ -345,20 +345,32 @@ class CollectionBuilder:
         last_day = next_month - timedelta(days=next_month.day)
         for schedule in schedule_list:
             run_time = str(schedule).lower()
-            if run_time.startswith("day") or run_time.startswith("daily"):
+            if run_time.startswith(("day", "daily")):
                 skip_collection = False
-            elif run_time.startswith("week") or run_time.startswith("month") or run_time.startswith("year"):
+            elif run_time.startswith(("hour", "week", "month", "year")):
                 match = re.search("\\(([^)]+)\\)", run_time)
-                if match:
+                if not match:
+                    logger.error(f"Collection Error: failed to parse schedule: {schedule}")
+                    continue
                 param = match.group(1)
-                if run_time.startswith("week"):
-                    if param.lower() in util.days_alias:
+                if run_time.startswith("hour"):
+                    try:
+                        if 0 <= int(param) <= 23:
+                            self.schedule += f"\nScheduled to run only on the {util.make_ordinal(param)} hour"
+                            if config.run_hour == int(param):
+                                skip_collection = False
+                        else:
+                            raise ValueError
+                    except ValueError:
+                        logger.error(f"Collection Error: hourly schedule attribute {schedule} invalid must be an integer between 0 and 23")
+                elif run_time.startswith("week"):
+                    if param.lower() not in util.days_alias:
+                        logger.error(f"Collection Error: weekly schedule attribute {schedule} invalid must be a day of the week i.e. weekly(Monday)")
+                        continue
                     weekday = util.days_alias[param.lower()]
                     self.schedule += f"\nScheduled weekly on {util.pretty_days[weekday]}"
                     if weekday == current_time.weekday():
                         skip_collection = False
-                    else:
-                        logger.error(f"Collection Error: weekly schedule attribute {schedule} invalid must be a day of the week i.e. weekly(Monday)")
                 elif run_time.startswith("month"):
                     try:
                         if 1 <= int(param) <= 31:
@@ -366,21 +378,19 @@ class CollectionBuilder:
                             if current_time.day == int(param) or (current_time.day == last_day.day and int(param) > last_day.day):
                                 skip_collection = False
                             else:
-                                logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be between 1 and 31")
+                                raise ValueError
                     except ValueError:
-                        logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be an integer")
+                        logger.error(f"Collection Error: monthly schedule attribute {schedule} invalid must be an integer between 1 and 31")
                 elif run_time.startswith("year"):
                     match = re.match("^(1[0-2]|0?[1-9])/(3[01]|[12][0-9]|0?[1-9])$", param)
-                    if match:
+                    if not match:
+                        logger.error(f"Collection Error: yearly schedule attribute {schedule} invalid must be in the MM/DD format i.e. yearly(11/22)")
+                        continue
                     month = int(match.group(1))
                     day = int(match.group(2))
                     self.schedule += f"\nScheduled yearly on {util.pretty_months[month]} {util.make_ordinal(day)}"
                     if current_time.month == month and (current_time.day == day or (current_time.day == last_day.day and day > last_day.day)):
                         skip_collection = False
-                    else:
-                        logger.error(f"Collection Error: yearly schedule attribute {schedule} invalid must be in the MM/DD format i.e. yearly(11/22)")
-                else:
-                    logger.error(f"Collection Error: failed to parse schedule: {schedule}")
             else:
                 logger.error(f"Collection Error: schedule attribute {schedule} invalid")
         if len(self.schedule) == 0:
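The new hour branch means a collection scheduled as hourly(HH) is only built by the run whose scheduled start falls in hour HH, compared against config.run_hour. A standalone sketch of that check, with an illustrative helper name rather than the builder code itself:

    import re

    # Hypothetical standalone version of the hourly check shown in the hunk above.
    def hourly_matches(schedule_value, run_hour):
        run_time = str(schedule_value).lower()            # e.g. "hourly(17)"
        match = re.search("\\(([^)]+)\\)", run_time)
        if not run_time.startswith("hour") or not match:
            return False
        try:
            hour = int(match.group(1))
            if not 0 <= hour <= 23:
                raise ValueError
            return hour == run_hour                       # compare against Config.run_hour
        except ValueError:
            return False                                  # invalid attribute; the real code logs an error

    print(hourly_matches("hourly(17)", 17))  # True
    print(hourly_matches("hourly(17)", 3))   # False
    print(hourly_matches("hourly(99)", 3))   # False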
@@ -1,4 +1,5 @@
 import logging, os
+from datetime import datetime
 from modules import util
 from modules.anidb import AniDBAPI
 from modules.anilist import AniListAPI
@@ -48,7 +49,7 @@ mass_update_options = {"tmdb": "Use TMDb Metadata", "omdb": "Use IMDb Metadata t
 library_types = {"movie": "For Movie Libraries", "show": "For Show Libraries"}

 class Config:
-    def __init__(self, default_dir, config_path=None, libraries_to_run=None):
+    def __init__(self, default_dir, config_path=None, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
         logger.info("Locating config...")
         if config_path and os.path.exists(config_path): self.config_path = os.path.abspath(config_path)
         elif config_path and not os.path.exists(config_path): raise Failed(f"Config Error: config not found at {os.path.abspath(config_path)}")
@@ -56,6 +57,13 @@ class Config:
         else: raise Failed(f"Config Error: config not found at {os.path.abspath(default_dir)}")
         logger.info(f"Using {self.config_path} as config")

+        self.test_mode = is_test
+        self.run_start_time = time_scheduled
+        self.run_hour = datetime.strptime(time_scheduled, "%H:%M").hour
+        self.requested_collections = util.get_list(requested_collections)
+        self.requested_libraries = util.get_list(requested_libraries)
+        self.resume_from = resume_from
+
         yaml.YAML().allow_duplicate_keys = True
         try:
             new_config, ind, bsi = yaml.util.load_yaml_guess_indent(open(self.config_path, encoding="utf-8"))
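Config now records the scheduled start time and derives run_hour from it; that is the value the hourly branch in the builder compares against. A minimal sketch of the derivation, written as a free function for illustration rather than the class itself:

    from datetime import datetime

    def derive_run_hour(time_scheduled):
        # "18:30" -> 18; the builder compares this against the hourly(HH) parameter
        return datetime.strptime(time_scheduled, "%H:%M").hour

    print(derive_run_hour("18:30"))  # 18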
@@ -312,9 +320,9 @@ class Config:
         self.libraries = []
         try: libs = check_for_attribute(self.data, "libraries", throw=True)
         except Failed as e: raise Failed(e)
-        requested_libraries = util.get_list(libraries_to_run) if libraries_to_run else None
         for library_name, lib in libs.items():
-            if requested_libraries and library_name not in requested_libraries:
+            if self.requested_libraries and library_name not in self.requested_libraries:
                 continue
             util.separator()
             params = {}
@@ -222,7 +222,8 @@ def compile_list(data):
     return data

 def get_list(data, lower=False, split=True, int_list=False):
-    if isinstance(data, list): return data
+    if data is None: return None
+    elif isinstance(data, list): return data
     elif isinstance(data, dict): return [data]
     elif split is False: return [str(data)]
     elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
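The new None guard lets optional values such as requested_collections, requested_libraries, or PMM_TIME be passed straight through util.get_list without special-casing at every call site. A reduced sketch of the behaviour, covering only the branches visible above; the trailing default branch is assumed and the int_list handling is omitted:

    def get_list(data, lower=False, split=True, int_list=False):
        if data is None: return None                       # new: optional args stay None
        elif isinstance(data, list): return data
        elif isinstance(data, dict): return [data]
        elif split is False: return [str(data)]
        elif lower is True: return [d.strip().lower() for d in str(data).split(",")]
        else: return [d.strip() for d in str(data).split(",")]   # assumed default branch

    print(get_list(None))            # None
    print(get_list("06:00,18:00"))   # ['06:00', '18:00']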
@@ -7,13 +7,17 @@ try:
     from modules.config import Config
     from modules.util import Failed
 except ModuleNotFoundError:
-    print("Error: Requirements are not installed")
+    print("Requirements Error: Requirements are not installed")
+    sys.exit(0)
+
+if sys.version_info[0] != 3 or sys.version_info[1] < 6:
+    print("Version Error: Version: %s.%s.%s incompatible please use Python 3.6+" % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
     sys.exit(0)

 parser = argparse.ArgumentParser()
 parser.add_argument("-db", "--debug", dest="debug", help=argparse.SUPPRESS, action="store_true", default=False)
 parser.add_argument("-c", "--config", dest="config", help="Run with desired *.yml file", type=str)
-parser.add_argument("-t", "--time", dest="time", help="Time to update each day use format HH:MM (Default: 03:00)", default="03:00", type=str)
+parser.add_argument("-t", "--time", dest="time", help="Times to update each day use format HH:MM (Default: 03:00) (comma-separated list)", default="03:00", type=str)
 parser.add_argument("-re", "--resume", dest="resume", help="Resume collection run from a specific collection", type=str)
 parser.add_argument("-r", "--run", dest="run", help="Run without the scheduler", action="store_true", default=False)
 parser.add_argument("-rt", "--test", "--tests", "--run-test", "--run-tests", dest="test", help="Run in debug mode with only collections that have test: true", action="store_true", default=False)
@@ -48,8 +52,9 @@ collections = os.environ.get("PMM_COLLECTIONS") if os.environ.get("PMM_COLLECTIO
 libraries = os.environ.get("PMM_LIBRARIES") if os.environ.get("PMM_LIBRARIES") else args.libraries
 resume = os.environ.get("PMM_RESUME") if os.environ.get("PMM_RESUME") else args.resume

-time_to_run = os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time
-if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", time_to_run):
+times_to_run = util.get_list(os.environ.get("PMM_TIME") if os.environ.get("PMM_TIME") else args.time)
+for time_to_run in times_to_run:
+    if not re.match("^([0-1]?[0-9]|2[0-3]):[0-5][0-9]$", time_to_run):
         raise util.Failed(f"Argument Error: time argument invalid: {time_to_run} must be in the HH:MM format")

 util.separating_character = os.environ.get("PMM_DIVIDER")[0] if os.environ.get("PMM_DIVIDER") else args.divider[0]
@@ -83,7 +88,7 @@ logger.addHandler(cmd_handler)

 sys.excepthook = util.my_except_hook

-def start(config_path, is_test, daily, requested_collections, requested_libraries, resume_from):
+def start(config_path, is_test=False, time_scheduled=None, requested_collections=None, requested_libraries=None, resume_from=None):
     file_logger = os.path.join(default_dir, "logs", "meta.log")
     should_roll_over = os.path.isfile(file_logger)
     file_handler = logging.handlers.RotatingFileHandler(file_logger, delay=True, mode="w", backupCount=10, encoding="utf-8")
@@ -101,16 +106,20 @@ def start(config_path, is_test, daily, requested_collections, requested_librarie
     logger.info(util.centered("|_| |_|\\___/_/\\_\\ |_| |_|\\___|\\__\\__,_| |_| |_|\\__,_|_| |_|\\__,_|\\__, |\\___|_| "))
     logger.info(util.centered(" |___/ "))
     logger.info(util.centered(" Version: 1.9.3-beta1 "))
-    if daily: start_type = "Daily "
+    if time_scheduled: start_type = f"{time_scheduled} "
     elif is_test: start_type = "Test "
     elif requested_collections: start_type = "Collections "
     elif requested_libraries: start_type = "Libraries "
     else: start_type = ""
     start_time = datetime.now()
+    if time_scheduled is None:
+        time_scheduled = start_time.strftime("%H:%M")
     util.separator(f"Starting {start_type}Run")
     try:
-        config = Config(default_dir, config_path, requested_libraries)
-        update_libraries(config, is_test, requested_collections, resume_from)
+        config = Config(default_dir, config_path=config_path, is_test=is_test,
+                        time_scheduled=time_scheduled, requested_collections=requested_collections,
+                        requested_libraries=requested_libraries, resume_from=resume_from)
+        update_libraries(config)
     except Exception as e:
         util.print_stacktrace()
         logger.critical(e)
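When a run is started without the scheduler (for example with --run), no time_scheduled is passed, so start() falls back to the wall-clock start time; that value then feeds Config.run_hour, so hourly(HH) collections still match the hour in which the manual run began. A small sketch of the fallback, with an illustrative function name:

    from datetime import datetime

    def resolve_time_scheduled(time_scheduled=None):
        # manual runs pass None; fall back to the current HH:MM so run_hour stays meaningful
        start_time = datetime.now()
        if time_scheduled is None:
            time_scheduled = start_time.strftime("%H:%M")
        return time_scheduled

    print(resolve_time_scheduled())          # e.g. "14:07" for a manual run
    print(resolve_time_scheduled("18:00"))   # "18:00" for a scheduled run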
@@ -118,7 +127,7 @@ def start(config_path, is_test, daily, requested_collections, requested_librarie
     util.separator(f"Finished {start_type}Run\nRun Time: {str(datetime.now() - start_time).split('.')[0]}")
     logger.removeHandler(file_handler)

-def update_libraries(config, is_test, requested_collections, resume_from):
+def update_libraries(config):
     for library in config.libraries:
         os.makedirs(os.path.join(default_dir, "logs", library.mapping_name, "collections"), exist_ok=True)
         col_file_logger = os.path.join(default_dir, "logs", library.mapping_name, "library.log")
@@ -136,29 +145,29 @@ def update_libraries(config, is_test, requested_collections, resume_from):
         util.separator(f"Mapping {library.name} Library", space=False, border=False)
         logger.info("")
         movie_map, show_map = map_guids(config, library)
-        if not is_test and not resume_from and not collection_only and library.mass_update:
+        if not config.test_mode and not config.resume_from and not collection_only and library.mass_update:
             mass_metadata(config, library, movie_map, show_map)
         for metadata in library.metadata_files:
             logger.info("")
             util.separator(f"Running Metadata File\n{metadata.path}")
-            if not is_test and not resume_from and not collection_only:
+            if not config.test_mode and not config.resume_from and not collection_only:
                 try:
-                    metadata.update_metadata(config.TMDb, is_test)
+                    metadata.update_metadata(config.TMDb, config.test_mode)
                 except Failed as e:
                     logger.error(e)
-            collections_to_run = metadata.get_collections(requested_collections)
+            collections_to_run = metadata.get_collections(config.requested_collections)
-            if resume_from and resume_from not in collections_to_run:
+            if config.resume_from and config.resume_from not in collections_to_run:
                 logger.info("")
-                logger.warning(f"Collection: {resume_from} not in Metadata File: {metadata.path}")
+                logger.warning(f"Collection: {config.resume_from} not in Metadata File: {metadata.path}")
                 continue
             if collections_to_run and not library_only:
                 logger.info("")
-                util.separator(f"{'Test ' if is_test else ''}Collections")
+                util.separator(f"{'Test ' if config.test_mode else ''}Collections")
                 logger.removeHandler(library_handler)
-                resume_from = run_collection(config, library, metadata, collections_to_run, is_test, resume_from, movie_map, show_map)
+                run_collection(config, library, metadata, movie_map, show_map)
                 logger.addHandler(library_handler)

-        if not is_test and not requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
+        if not config.test_mode and not config.requested_collections and ((library.show_unmanaged and not library_only) or (library.assets_for_all and not collection_only)):
             logger.info("")
             util.separator(f"Other {library.name} Library Operations")
             unmanaged_collections = []
@@ -240,9 +249,11 @@ def map_guids(config, library):
     movie_map = {}
     show_map = {}
     length = 0
-    logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
+    logger.info(f"Loading {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
     logger.info("")
     items = library.Plex.all()
+    logger.info(f"Mapping {'Movie' if library.is_movie else 'Show'} Library: {library.name}")
+    logger.info("")
     for i, item in enumerate(items, 1):
         length = util.print_return(length, f"Processing: {i}/{len(items)} {item.title}")
         id_type, main_id = config.Convert.get_id(item, library, length)
@@ -379,11 +390,11 @@ def mass_metadata(config, library, movie_map, show_map):
         except Failed as e:
             logger.error(e)

-def run_collection(config, library, metadata, requested_collections, is_test, resume_from, movie_map, show_map):
+def run_collection(config, library, metadata, movie_map, show_map):
     logger.info("")
-    for mapping_name, collection_attrs in requested_collections.items():
+    for mapping_name, collection_attrs in config.requested_collections.items():
         collection_start = datetime.now()
-        if is_test and ("test" not in collection_attrs or collection_attrs["test"] is not True):
+        if config.test_mode and ("test" not in collection_attrs or collection_attrs["test"] is not True):
             no_template_test = True
             if "template" in collection_attrs and collection_attrs["template"]:
                 for data_template in util.get_list(collection_attrs["template"], split=False):
@@ -398,10 +409,10 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
             if no_template_test:
                 continue

-        if resume_from and resume_from != mapping_name:
+        if config.resume_from and config.resume_from != mapping_name:
             continue
-        elif resume_from == mapping_name:
+        elif config.resume_from == mapping_name:
-            resume_from = None
+            config.resume_from = None
             logger.info("")
             util.separator(f"Resuming Collections")

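With resume_from now carried on config, the loop skips collections until it reaches the named one, then clears the marker so everything after it runs normally. A standalone sketch of that skip-until behaviour, using an illustrative generator name:

    def collections_in_resume_order(collection_names, resume_from=None):
        # skip everything before resume_from, then clear it so the rest run normally
        for name in collection_names:
            if resume_from and resume_from != name:
                continue
            elif resume_from == name:
                resume_from = None
            yield name

    print(list(collections_in_resume_order(["A", "B", "C"], resume_from="B")))  # ['B', 'C']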
@@ -481,27 +492,32 @@ def run_collection(config, library, metadata, requested_collections, is_test, re
         logger.info("")
         util.separator(f"Finished {mapping_name} Collection\nCollection Run Time: {str(datetime.now() - collection_start).split('.')[0]}")
         logger.removeHandler(collection_handler)
-    return resume_from

 try:
     if run or test or collections or libraries or resume:
-        start(config_file, test, False, collections, libraries, resume)
+        start(config_file, is_test=test, requested_collections=collections, requested_libraries=libraries, resume_from=resume)
     else:
         time_length = 0
-        schedule.every().day.at(time_to_run).do(start, config_file, False, True, None, None, None)
+        for time_to_run in times_to_run:
+            schedule.every().day.at(time_to_run).do(start, config_file, time_scheduled=time_to_run)
         while True:
             schedule.run_pending()
             if not no_countdown:
                 current = datetime.now().strftime("%H:%M")
-                seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
+                seconds = None
+                og_time_str = ""
+                for time_to_run in times_to_run:
+                    new_seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
+                    if new_seconds < 0:
+                        new_seconds += 86400
+                    if (seconds is None or new_seconds < seconds) and new_seconds > 0:
+                        seconds = new_seconds
+                        og_time_str = time_to_run
                 hours = int(seconds // 3600)
-                if hours < 0:
-                    hours += 24
                 minutes = int((seconds % 3600) // 60)
                 time_str = f"{hours} Hour{'s' if hours > 1 else ''} and " if hours > 0 else ""
                 time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
-                time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the daily run at {time_to_run}")
+                time_length = util.print_return(time_length, f"Current Time: {current} | {time_str} until the next run at {og_time_str} {times_to_run}")
-            time.sleep(1)
+            time.sleep(60)
 except KeyboardInterrupt:
     util.separator("Exiting Plex Meta Manager")
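With several scheduled times, the idle countdown now picks whichever time comes next, wrapping slots that have already passed today into tomorrow by adding 86400 seconds. A standalone sketch of that selection, with an illustrative function name (the loop above additionally formats the hours/minutes string and prints it in place):

    from datetime import datetime

    def seconds_until_next_run(times_to_run, current):
        # current and entries are "HH:MM"; negative deltas mean the slot already passed today
        seconds, next_time = None, ""
        for time_to_run in times_to_run:
            new_seconds = (datetime.strptime(time_to_run, "%H:%M") - datetime.strptime(current, "%H:%M")).total_seconds()
            if new_seconds < 0:
                new_seconds += 86400                     # wrap to tomorrow
            if (seconds is None or new_seconds < seconds) and new_seconds > 0:
                seconds, next_time = new_seconds, time_to_run
        return seconds, next_time

    print(seconds_until_next_run(["06:00", "18:00"], "17:10"))  # (3000.0, '18:00')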