[81] #687 add --cache-libraries

meisnate12 2022-05-10 11:47:13 -04:00
parent ff0def5065
commit 3202274f4c
7 changed files with 113 additions and 35 deletions


@@ -1 +1 @@
1.16.5-develop80
1.16.5-develop81


@@ -21,6 +21,7 @@ These docs are assuming you have a basic understanding of Docker concepts. One
| [Libraries First](#libraries-first) | `-lf` or `--libraries-first` | `PMM_LIBRARIES_FIRST` |
| [Ignore Schedules](#ignore-schedules) | `-is` or `--ignore-schedules` | `PMM_IGNORE_SCHEDULES` |
| [Ignore Ghost](#ignore-ghost) | `-ig` or `--ignore-ghost` | `PMM_IGNORE_GHOST` |
| [Cache Libraries](#cache-libraries) | `-ca` or `--cache-libraries` | `PMM_CACHE_LIBRARIES` |
| [Delete Collections](#delete-collections) | `-dc` or `--delete-collections` | `PMM_DELETE_COLLECTIONS` |
| [Resume Run](#resume-run) | `-re` or `--resume` | `PMM_RESUME` |
| [No Countdown](#no-countdown) | `-nc` or `--no-countdown` | `PMM_NO_COUNTDOWN` |
@@ -574,6 +575,45 @@ docker run -it -v "X:\Media\Plex Meta Manager\config:/config:rw" meisnate12/plex
</details>
### Cache Libraries
Cache the library load for 1 day.
<table class="dualTable colwidths-auto align-default table">
<tr>
<th style="background-color: #222;"></th>
<th>Shell</th>
<th>Environment</th>
</tr>
<tr>
<th>Flags</th>
<td><code>-ca</code> or <code>--cache-libraries</code></td>
<td><code>PMM_CACHE_LIBRARIES</code></td>
</tr>
<tr>
<th>Example</th>
<td><code>--cache-libraries</code></td>
<td><code>PMM_CACHE_LIBRARIES=true</code></td>
</tr>
</table>
<details>
<summary>Local Environment</summary>
```shell
python plex_meta_manager.py --cache-libraries
```
</details>
<details>
<summary>Docker Environment</summary>
```shell
docker run -it -v "X:\Media\Plex Meta Manager\config:/config:rw" meisnate12/plex-meta-manager --cache-libraries
```
</details>
### Delete Collections
Delete all collections in a Library prior to running collections/operations.


@@ -2342,7 +2342,7 @@ class CollectionBuilder:
logger.error(e)
if self.run_again:
self.run_again_shows.extend(missing_tvdb_ids)
if len(self.missing_parts) > 0 and self.library.is_show and self.details["save_missing"] is True:
if len(self.missing_parts) > 0 and self.library.is_show and self.details["show_missing"] is True:
for missing in self.missing_parts:
logger.info(f"{self.name} {self.Type} | X | {missing}")
return added_to_radarr, added_to_sonarr


@@ -740,15 +740,6 @@ class Cache:
with closing(connection.cursor()) as cursor:
cursor.execute(f"INSERT OR IGNORE INTO {arr}_adds({id_type}, library) VALUES(?, ?)", (t_id, library))
def update_list_ids(self, list_key, media_ids):
final_ids = []
for media_id, media_type in media_ids:
final_ids.append((list_key, media_id, media_type))
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.executemany(f"INSERT OR IGNORE INTO list_ids(list_key, media_id, media_type) VALUES(?, ?, ?)", final_ids)
def update_list_cache(self, list_type, list_data, expired, expiration):
list_key = None
expiration_date = datetime.now() if expired is True else (datetime.now() - timedelta(days=expiration))
@@ -778,6 +769,15 @@ class Cache:
expired = time_between_insertion.days > expiration
return list_key, expired
def update_list_ids(self, list_key, media_ids):
final_ids = []
for media_id, media_type in media_ids:
final_ids.append((list_key, media_id, media_type))
with sqlite3.connect(self.cache_path) as connection:
connection.row_factory = sqlite3.Row
with closing(connection.cursor()) as cursor:
cursor.executemany(f"INSERT OR IGNORE INTO list_ids(list_key, media_id, media_type) VALUES(?, ?, ?)", final_ids)
def query_list_ids(self, list_key):
ids = []
with sqlite3.connect(self.cache_path) as connection:
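The relocated `update_list_ids` and the `query_list_ids` method below it are the two halves of the library cache: one bulk-inserts `(list_key, media_id, media_type)` rows, the other reads them back. A standalone sketch of that pattern against an in-memory SQLite database (the table and column names are copied from the hunk above; the schema and sample values are illustrative, not taken from the commit):
```python
import sqlite3
from contextlib import closing

# Minimal stand-in for the list_ids table used by the Cache class above;
# this schema is inferred from the INSERT statement, not copied from the commit.
with sqlite3.connect(":memory:") as connection:
    connection.row_factory = sqlite3.Row
    with closing(connection.cursor()) as cursor:
        cursor.execute("CREATE TABLE IF NOT EXISTS list_ids(list_key INTEGER, media_id TEXT, media_type TEXT, PRIMARY KEY(list_key, media_id))")

        # update_list_ids-style bulk insert: for --cache-libraries the pairs are
        # (ratingKey, guid), keyed by the list_key returned from update_list_cache.
        list_key = 1
        media_ids = [("12345", "plex://movie/abc"), ("67890", "plex://show/def")]  # illustrative values
        final_ids = [(list_key, media_id, media_type) for media_id, media_type in media_ids]
        cursor.executemany("INSERT OR IGNORE INTO list_ids(list_key, media_id, media_type) VALUES(?, ?, ?)", final_ids)

        # query_list_ids-style read: fetch every cached pair for that key.
        cursor.execute("SELECT media_id, media_type FROM list_ids WHERE list_key = ?", (list_key,))
        ids = [(row["media_id"], row["media_type"]) for row in cursor.fetchall()]
        print(ids)  # the two pairs inserted above
```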


@@ -204,25 +204,36 @@ class Convert:
else:
return None
def ids_from_cache(self, ratingKey, guid, item_type, check_id, library):
media_id_type = None
cache_id = None
imdb_check = None
expired = None
if self.config.Cache:
cache_id, imdb_check, media_type, expired = self.config.Cache.query_guid_map(guid)
if (cache_id or imdb_check) and not expired:
media_id_type = "movie" if "movie" in media_type else "show"
if item_type == "hama" and check_id.startswith("anidb"):
anidb_id = int(re.search("-(.*)", check_id).group(1))
library.anidb_map[anidb_id] = ratingKey
elif item_type == "myanimelist":
library.mal_map[int(check_id)] = ratingKey
return media_id_type, cache_id, imdb_check, expired
def scan_guid(self, guid_str):
guid = requests.utils.urlparse(guid_str)
return guid.scheme.split(".")[-1], guid.netloc
def get_id(self, item, library):
expired = None
tmdb_id = []
tvdb_id = []
imdb_id = []
anidb_id = None
guid = requests.utils.urlparse(item.guid)
item_type = guid.scheme.split(".")[-1]
check_id = guid.netloc
if self.config.Cache:
cache_id, imdb_check, media_type, expired = self.config.Cache.query_guid_map(item.guid)
if (cache_id or imdb_check) and not expired:
media_id_type = "movie" if "movie" in media_type else "show"
if item_type == "hama" and check_id.startswith("anidb"):
anidb_id = int(re.search("-(.*)", check_id).group(1))
library.anidb_map[anidb_id] = item.ratingKey
elif item_type == "myanimelist":
library.mal_map[int(check_id)] = item.ratingKey
return media_id_type, cache_id, imdb_check
item_type, check_id = self.scan_guid(item.guid)
media_id_type, cache_id, imdb_check, expired = self.ids_from_cache(item.ratingKey, item.guid, item_type, check_id, library)
if (cache_id or imdb_check) and expired is False:
return media_id_type, cache_id, imdb_check
try:
if item_type == "plex":
try:
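For reference, the new `scan_guid` helper splits a Plex agent GUID into its agent suffix and identifier via `requests.utils.urlparse` (a re-export of `urllib.parse.urlparse`); both `get_id` and the new cached path in `map_guids` consume that pair. A quick illustration with a made-up GUID value (the GUID is an example, not from the commit):
```python
import requests

def scan_guid(guid_str):
    # Same parsing as Convert.scan_guid above: the scheme's last dotted part is
    # the agent name, the netloc is the agent's item id.
    guid = requests.utils.urlparse(guid_str)
    return guid.scheme.split(".")[-1], guid.netloc

# Legacy Plex agent GUID format (illustrative value).
item_type, check_id = scan_guid("com.plexapp.agents.imdb://tt0133093?lang=en")
print(item_type, check_id)  # -> imdb tt0133093
```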


@@ -236,17 +236,27 @@ class Library(ABC):
def map_guids(self, items):
for i, item in enumerate(items, 1):
logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
if item.ratingKey not in self.movie_rating_key_map and item.ratingKey not in self.show_rating_key_map:
id_type, main_id, imdb_id = self.config.Convert.get_id(item, self)
if isinstance(item, tuple):
logger.ghost(f"Processing: {i}/{len(items)}")
key, guid = item
else:
logger.ghost(f"Processing: {i}/{len(items)} {item.title}")
key = item.ratingKey
guid = item.guid
if key not in self.movie_rating_key_map and key not in self.show_rating_key_map:
if isinstance(item, tuple):
item_type, check_id = self.config.Convert.scan_guid(guid)
id_type, main_id, imdb_id, _ = self.config.Convert.ids_from_cache(key, guid, item_type, check_id, self)
else:
id_type, main_id, imdb_id = self.config.Convert.get_id(item, self)
if main_id:
if id_type == "movie":
self.movie_rating_key_map[item.ratingKey] = main_id[0]
util.add_dict_list(main_id, item.ratingKey, self.movie_map)
self.movie_rating_key_map[key] = main_id[0]
util.add_dict_list(main_id, key, self.movie_map)
elif id_type == "show":
self.show_rating_key_map[item.ratingKey] = main_id[0]
util.add_dict_list(main_id, item.ratingKey, self.show_map)
self.show_rating_key_map[key] = main_id[0]
util.add_dict_list(main_id, key, self.show_map)
if imdb_id:
util.add_dict_list(imdb_id, item.ratingKey, self.imdb_map)
util.add_dict_list(imdb_id, key, self.imdb_map)
logger.info("")
logger.info(f"Processed {len(items)} {self.type}s")


@@ -32,6 +32,7 @@ parser.add_argument("-lf", "--library-first", "--libraries-first", dest="library
parser.add_argument("-rc", "-cl", "--collection", "--collections", "--run-collection", "--run-collections", dest="collections", help="Process only specified collections (comma-separated list)", type=str)
parser.add_argument("-rl", "-l", "--library", "--libraries", "--run-library", "--run-libraries", dest="libraries", help="Process only specified libraries (comma-separated list)", type=str)
parser.add_argument("-rm", "-m", "--metadata", "--metadata-files", "--run-metadata-files", dest="metadata", help="Process only specified Metadata files (comma-separated list)", type=str)
parser.add_argument("-ca", "--cache-library", "--cache-libraries", dest="cache_libraries", help="Cache Library load for 1 day", action="store_true", default=False)
parser.add_argument("-dc", "--delete", "--delete-collections", dest="delete", help="Deletes all Collections in the Plex Library before running", action="store_true", default=False)
parser.add_argument("-nc", "--no-countdown", dest="no_countdown", help="Run without displaying the countdown", action="store_true", default=False)
parser.add_argument("-nm", "--no-missing", dest="no_missing", help="Run without running the missing section", action="store_true", default=False)
@@ -76,6 +77,7 @@ library_first = get_arg("PMM_LIBRARIES_FIRST", args.library_first, arg_bool=True
collections = get_arg("PMM_COLLECTIONS", args.collections)
libraries = get_arg("PMM_LIBRARIES", args.libraries)
metadata_files = get_arg("PMM_METADATA_FILES", args.metadata)
cache_libraries = get_arg("PMM_CACHE_LIBRARIES", args.cache_libraries, arg_bool=True)
delete = get_arg("PMM_DELETE_COLLECTIONS", args.delete, arg_bool=True)
resume = get_arg("PMM_RESUME", args.resume)
no_countdown = get_arg("PMM_NO_COUNTDOWN", args.no_countdown, arg_bool=True)
@@ -275,14 +277,29 @@ def update_libraries(config):
library.query(collection.delete)
library_status[library.name]["All Collections Deleted"] = str(datetime.now() - time_start).split('.')[0]
temp_items = library.cache_items()
time_start = datetime.now()
temp_items = None
list_key = None
expired = None
if config.Cache and cache_libraries:
list_key, expired = config.Cache.query_list_cache("library", library.mapping_name, 1)
if list_key and expired is False:
logger.info(f"Library: {library.mapping_name} loaded from Cache")
temp_items = config.Cache.query_list_ids(list_key)
if not temp_items:
temp_items = library.cache_items()
if config.Cache and cache_libraries:
if list_key:
config.Cache.delete_list_ids(list_key)
list_key = config.Cache.update_list_cache("library", library.mapping_name, expired, 1)
config.Cache.update_list_ids(list_key, [(i.ratingKey, i.guid) for i in temp_items])
if not library.is_other and not library.is_music:
time_start = datetime.now()
logger.info("")
logger.separator(f"Mapping {library.name} Library", space=False, border=False)
logger.info("")
library.map_guids(temp_items)
library_status[library.name]["Library Loading and Mapping"] = str(datetime.now() - time_start).split('.')[0]
library_status[library.name]["Library Loading and Mapping"] = str(datetime.now() - time_start).split('.')[0]
if config.library_first and not config.test_mode and not collection_only:
if not overlays_only and library.library_operation:
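Putting the pieces together, the `update_libraries` changes above make `--cache-libraries` skip the Plex load whenever a snapshot of `(ratingKey, guid)` pairs younger than one day exists. A minimal sketch of that control flow, reusing the names visible in the hunks (`config`, `library`, and the Cache methods); it is an illustration of how the pieces fit, not code lifted from the commit:
```python
# Sketch of the --cache-libraries flow in update_libraries(); the config and
# library objects are assumed to be the existing PMM ones from the diff above.
def load_library_items(config, library, cache_libraries):
    temp_items = None
    list_key = None
    expired = None

    # Reuse the cached snapshot if one exists and is younger than 1 day.
    if config.Cache and cache_libraries:
        list_key, expired = config.Cache.query_list_cache("library", library.mapping_name, 1)
        if list_key and expired is False:
            temp_items = config.Cache.query_list_ids(list_key)  # [(ratingKey, guid), ...]

    # Otherwise load from Plex and refresh the cached (ratingKey, guid) list.
    if not temp_items:
        temp_items = library.cache_items()
        if config.Cache and cache_libraries:
            if list_key:
                config.Cache.delete_list_ids(list_key)
            list_key = config.Cache.update_list_cache("library", library.mapping_name, expired, 1)
            config.Cache.update_list_ids(list_key, [(i.ratingKey, i.guid) for i in temp_items])

    # map_guids() now accepts either full Plex items or cached (ratingKey, guid)
    # tuples, resolving the latter through scan_guid() + ids_from_cache().
    library.map_guids(temp_items)
    return temp_items
```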