Mirror of https://github.com/meisnate12/Plex-Meta-Manager (synced 2024-11-10 06:54:21 +00:00)

Switched from retrying to tenacity for http request retries (#2105)
commit 1ff0f8492f, parent dcf0435a96
17 changed files with 138 additions and 81 deletions
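The change is a mechanical translation of every `retrying` decorator in the codebase to its `tenacity` equivalent. A minimal sketch of the mapping (the `fetch` function and the local `Failed` class below are placeholders; the real `Failed` lives in the project's util module):

```python
# Old (retrying):  @retry(stop_max_attempt_number=6, wait_fixed=10000,      # 10000 ms
#                         retry_on_exception=util.retry_if_not_failed)
# New (tenacity):  @retry(stop=stop_after_attempt(6), wait=wait_fixed(10),  # 10 s
#                         retry=retry_if_not_exception_type(Failed))
from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type


class Failed(Exception):
    """Stand-in for the project's Failed exception: errors that should not be retried."""


@retry(stop=stop_after_attempt(6), wait=wait_fixed(10),
       retry=retry_if_not_exception_type(Failed))
def fetch():
    ...  # any exception other than Failed is retried, up to 6 attempts, 10 seconds apart
```

Note that `retrying` expressed the fixed wait in milliseconds (10000) while tenacity's `wait_fixed()` takes seconds, so `wait_fixed(10)` keeps the same 10-second delay.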
.github/workflows/increment-build.yml (vendored, 13 changes)

@@ -15,6 +15,7 @@ jobs:
  commit-msg: ${{ steps.update-version.outputs.commit-msg }}
  commit-hash: ${{ steps.update-version.outputs.commit-hash }}
  commit-short: ${{ steps.update-version.outputs.commit-short }}
+ pr-tag: ${{ steps.update-version.outputs.pr-tag }}
  steps:

  - name: Create App Token

@@ -34,6 +35,16 @@ jobs:
  - name: Update VERSION File
  id: update-version
  run: |
+ branch_name=${{ github.event.pull_request.head.ref }}
+ repo_name=${{ github.event.pull_request.head.repo.full_name }}
+ base_name="${repo_name%/*}"
+ if [[ "${branch_name}" =~ ^(master|develop|nightly)$ ]]; then
+ pr_tag="${base_name}"
+ else
+ pr_tag="${branch_name}"
+ fi
+ echo "pr-tag=${pr_tag}" >> $GITHUB_OUTPUT
+
  value=$(cat VERSION)
  old_msg=$(git log -1 HEAD --pretty=format:%s)
  version="${value%-build*}"

@@ -192,4 +203,4 @@ jobs:
  curl -i -X DELETE \
  -H "Accept: application/json" \
  -H "Authorization: JWT $HUB_TOKEN" \
- https://hub.docker.com/v2/repositories/kometateam/kometa/tags/${{ github.head_ref }}/
+ https://hub.docker.com/v2/repositories/kometateam/kometa/tags/${{ needs.increment-build.outputs.pr-tag }}/

.github/workflows/validate-pull.yml (vendored, 38 changes)

@@ -32,10 +32,12 @@ jobs:
  docker-build-pull:
  runs-on: ubuntu-latest
  needs: [ validate-pull ]
- if: contains(github.event.pull_request.labels.*.name, 'docker') || contains(github.event.pull_request.labels.*.name, 'testers')
+ if: contains(github.event.pull_request.labels.*.name, 'docker') || contains(github.event.pull_request.labels.*.name, 'tester')
  outputs:
  commit-msg: ${{ steps.update-version.outputs.commit-msg }}
  version: ${{ steps.update-version.outputs.version }}
+ tag-name: ${{ steps.update-version.outputs.tag-name }}
+ extra-text: ${{ steps.update-version.outputs.extra-text }}
  steps:

  - name: Create App Token

@@ -54,6 +56,23 @@ jobs:
  - name: Update VERSION File
  id: update-version
  run: |
+ branch_name=${{ github.event.pull_request.head.ref }}
+ repo_name=${{ github.event.pull_request.head.repo.full_name }}
+ base_name="${repo_name%/*}"
+ if [[ "${branch_name}" =~ ^(master|develop|nightly)$ ]]; then
+ tag_name="${base_name}"
+ else
+ tag_name="${branch_name}"
+ fi
+ echo "tag-name=${tag_name}" >> $GITHUB_OUTPUT
+
+ if [[ "${base_name}" == "Kometa-Team" ]]; then
+ extra=""
+ else
+ extra=" from the ${{ github.event.pull_request.head.repo.full_name }} repo"
+ fi
+ echo "extra-text=${extra}" >> $GITHUB_OUTPUT
+
  value=$(cat VERSION)
  old_msg=$(git log -1 HEAD --pretty=format:%s)
  echo "commit-msg=${old_msg}" >> $GITHUB_OUTPUT

@@ -80,7 +99,7 @@ jobs:
  git config --local user.email "action@github.com"
  git config --local user.name "GitHub Action"
  git add VERSION
- git commit -m "Part: ${part_value}"
+ git commit -m "${tag_name} Part: ${part_value}"
  git push

  - name: Login to Docker Hub

@@ -105,10 +124,10 @@ jobs:
  context: ./
  file: ./Dockerfile
  build-args: |
- "BRANCH_NAME=${{ github.event.pull_request.head.ref }}"
+ "BRANCH_NAME=${{ steps.update-version.outputs.tag-name }}"
  platforms: linux/amd64,linux/arm64
  push: true
- tags: kometateam/kometa:${{ github.event.pull_request.head.ref }}
+ tags: kometateam/kometa:${{ steps.update-version.outputs.tag-name }}
  cache-from: type=gha
  cache-to: type=gha,mode=max

@@ -117,7 +136,7 @@ jobs:
  if: success()
  with:
  webhook_id_token: ${{ secrets.BUILD_WEBHOOK }}
- title: "${{ vars.REPO_NAME }} ${{ github.event.pull_request.head.ref }}: ${{ vars.TEXT_SUCCESS }}"
+ title: "${{ vars.REPO_NAME }} ${{ steps.update-version.outputs.tag-name }}: ${{ vars.TEXT_SUCCESS }}"
  url: https://github.com/Kometa-Team/${{ vars.REPO_NAME }}/actions/runs/${{ github.run_id }}
  color: ${{ vars.COLOR_SUCCESS }}
  username: ${{ vars.BOT_NAME }}

@@ -131,7 +150,7 @@ jobs:
  with:
  webhook_id_token: ${{ secrets.BUILD_WEBHOOK }}
  message: ${{ vars.BUILD_FAILURE_ROLE }}
- title: "${{ vars.REPO_NAME }} ${{ github.event.pull_request.head.ref }}: ${{ vars.TEXT_FAILURE }}"
+ title: "${{ vars.REPO_NAME }} ${{ steps.update-version.outputs.tag-name }}: ${{ vars.TEXT_FAILURE }}"
  url: https://github.com/Kometa-Team/${{ vars.REPO_NAME }}/actions/runs/${{ github.run_id }}
  color: ${{ vars.COLOR_FAILURE }}
  username: ${{ vars.BOT_NAME }}

@@ -141,6 +160,7 @@ jobs:

  notify-testers:
  runs-on: ubuntu-latest
+ needs: [ docker-build-pull ]
  if: github.event.action == 'labeled' && github.event.label.name == 'tester'
  steps:

@@ -158,9 +178,9 @@ jobs:
  webhook_id_token: ${{ secrets.TESTERS_WEBHOOK }}
  message: "The Kometa team are requesting <@&917323027438510110> to assist with testing an upcoming feature/bug fix.

- * For Local Git pull and checkout the `${{ github.event.pull_request.head.ref }}` branch
+ * For Local Git pull and checkout the `${{ github.event.pull_request.head.ref }}` branch${{ needs.docker-build-pull.outputs.extra-text }}

- * For Docker use the `kometateam/kometa:${{ github.event.pull_request.head.ref }}` image to do your testing
+ * For Docker use the `kometateam/kometa:${{ needs.docker-build-pull.outputs.tag-name }}` image to do your testing

  Please report back either here or on the original GitHub Pull Request"
  title: ${{ github.event.pull_request.title }}

@@ -182,7 +202,7 @@ jobs:
  uses: Kometa-Team/discord-notifications@master
  with:
  webhook_id_token: ${{ secrets.TESTERS_WEBHOOK }}
- message: "New Commit Pushed to `${{ github.event.pull_request.head.ref }}`: ${{ needs.docker-build-pull.outputs.version }}"
+ message: "New Commit Pushed to `${{ needs.docker-build-pull.outputs.tag-name }}`: ${{ needs.docker-build-pull.outputs.version }}"
  description: ${{ needs.docker-build-pull.outputs.commit-msg }}
  url: https://github.com/Kometa-Team/${{ vars.REPO_NAME }}/pull/${{ github.event.number }}
  color: ${{ vars.COLOR_SUCCESS }}

@@ -1,4 +1,5 @@
  # Requirements Update (requirements will need to be reinstalled)
+ Added tenacity requirement at 8.3.0

  # Removed Features

VERSION (2 changes)

@@ -1 +1 @@
- 2.0.2-build4
+ 2.0.2-build5

@@ -61,6 +61,8 @@ These are the developers and creators of the technologies that are required to m
  | [meisnate12](https://github.com/meisnate12) | Creator of [ArrAPI](https://github.com/Kometa-Team/ArrAPI) and [TMDbAPIs](https://github.com/Kometa-Team/TMDbAPIs) | [Click Here](https://github.com/sponsors/meisnate12) |
  | [dbader](https://github.com/dbader) | Creator of [schedule](https://github.com/dbader/schedule) | :fontawesome-solid-circle-xmark:{ .red } |
  | [rholder](https://github.com/rholder) | Creator of [retrying](https://github.com/rholder/retrying) | :fontawesome-solid-circle-xmark:{ .red } |
+ | [jd](https://github.com/jd) | Creator of [tenacity](https://github.com/jd/tenacity) | :fontawesome-solid-circle-xmark:{ .red } |

  ## Other Acknowledgements

@@ -374,10 +374,10 @@ Collecting PlexAPI==4.7.0
  Collecting tmdbv3api==1.7.6
  Downloading tmdbv3api-1.7.6-py2.py3-none-any.whl (17 kB)
  ...
- Installing collected packages: urllib3, idna, charset-normalizer, certifi, six, ruamel.yaml.clib, requests, tmdbv3api, schedule, ruamel.yaml, retrying, PlexAPI, pillow, pathvalidate, lxml, arrapi
+ Installing collected packages: urllib3, idna, charset-normalizer, certifi, six, ruamel.yaml.clib, requests, tmdbv3api, tenacity, ruamel.yaml, tenacity, PlexAPI, pillow, pathvalidate, lxml, arrapi
- Running setup.py install for retrying ... done
+ Running setup.py install for tenacity ... done
  Running setup.py install for arrapi ... done
- Successfully installed PlexAPI-4.7.0 arrapi-1.1.3 certifi-2021.10.8 charset-normalizer-2.0.7 idna-3.3 lxml-4.6.3 pathvalidate-2.4.1 pillow-8.3.2 requests-2.26.0 retrying-1.3.3 ruamel.yaml-0.17.10 ruamel.yaml.clib-0.2.6 schedule-1.1.0 six-1.16.0 tmdbv3api-1.7.6 urllib3-1.26.7
+ Successfully installed PlexAPI-4.7.0 arrapi-1.1.3 certifi-2021.10.8 charset-normalizer-2.0.7 idna-3.3 lxml-4.6.3 pathvalidate-2.4.1 pillow-8.3.2 requests-2.26.0 tenacity-8.3.0 ruamel.yaml-0.17.10 ruamel.yaml.clib-0.2.6 tenacity-8.3.0 six-1.16.0 tmdbv3api-1.7.6 urllib3-1.26.7
  WARNING: You are using pip version 21.1.3; however, version 21.3 is available.
  You should consider upgrading via the '/Users/mroche/Kometa/kometa-venv/bin/python -m pip install --upgrade pip' command.
  ```

@@ -31,7 +31,7 @@ system_versions = {
  "python-dotenv": dotenv_version.__version__,
  "python-dateutil": dateutil.__version__,  # noqa
  "requests": requests.__version__,
- "retrying": None,
+ "tenacity": None,
  "ruamel.yaml": ruamel.yaml.__version__,
  "schedule": None,
  "setuptools": setuptools.__version__,

@@ -2001,12 +2001,13 @@ class MetadataFile(DataFile):
  episodes[f"{available.month}-{available.day}"] = episode
  for episode_id, episode_dict in season_dict[season_methods["episodes"]].items():
  updated = False
+ title_name = f"Episode: {episode_id} in Season: {season_id} of {mapping_name}"
  logger.info("")
- logger.info(f"Updating episode {episode_id} in {season_id} of {mapping_name}...")
+ logger.info(f"Updating {title_name}...")
  if episode_id in episodes:
  episode = episodes[episode_id]
  else:
- logger.error(f"{self.type_str} Error: Episode {episode_id} in Season {season_id} not found")
+ logger.error(f"{self.type_str} Error: {title_name} not found")
  continue
  episode_methods = {em.lower(): em for em in episode_dict}
  add_edit("title", episode, episode_dict, episode_methods)

@@ -2020,7 +2021,7 @@ class MetadataFile(DataFile):
  for tag_edit in ["director", "writer", "label"]:
  if self.edit_tags(tag_edit, episode, episode_dict, episode_methods):
  updated = True
- finish_edit(episode, f"Episode: {episode_id} in Season: {season_id}")
+ finish_edit(episode, title_name)
  episode_style_data = None
  if season_style_data and "episodes" in season_style_data and season_style_data["episodes"] and episode_id in season_style_data["episodes"]:
  episode_style_data = season_style_data["episodes"][episode_id]

@@ -2030,7 +2031,7 @@ class MetadataFile(DataFile):
  style_data=episode_style_data)
  if ups:
  updated = True
- logger.info(f"Episode {episode_id} in Season {season_id} of {mapping_name} Metadata Update {'Complete' if updated else 'Not Needed'}")
+ logger.info(f"{title_name} Metadata Update {'Complete' if updated else 'Not Needed'}")

  if "episodes" in methods and update_episodes and self.library.is_show:
  if not meta[methods["episodes"]]:

@@ -1,7 +1,7 @@
  from json import JSONDecodeError
  from modules import util
  from modules.util import Failed
- from retrying import retry
+ from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type

  logger = util.logger

@@ -14,23 +14,20 @@ class Notifiarr:
  self.apikey = params["apikey"]
  self.header = {"X-API-Key": self.apikey}
  logger.secret(self.apikey)
- try:
- self.request(path="user", params={"fetch": "settings"})
- except JSONDecodeError:
- raise Failed("Notifiarr Error: Invalid JSON response received")
+ self._request(path="user", params={"fetch": "settings"})

  def notification(self, json):
- return self.request(json=json)
+ return self._request(json=json)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
- def request(self, json=None, path="notification", params=None):
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
+ def _request(self, json=None, path="notification", params=None):
  response = self.requests.get(f"{base_url}{path}/pmm/", json=json, headers=self.header, params=params)
  try:
  response_json = response.json()
  except JSONDecodeError as e:
- logger.error(response.content)
- logger.debug(e)
- raise e
+ logger.debug(f"Content: {response.content}")
+ logger.error(e)
+ raise Failed("Notifiarr Error: Invalid JSON response received")
  if response.status_code >= 400 or ("result" in response_json and response_json["result"] == "error"):
  logger.debug(f"Response: {response_json}")
  raise Failed(f"({response.status_code} [{response.reason}]) {response_json}")

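Because the new decorator only retries exceptions that are not `Failed`, converting the `JSONDecodeError` into a `Failed` inside `_request` makes a malformed response body fail immediately instead of being retried six times. A small self-contained sketch of that behaviour (names and values are illustrative, not the repo's code):

```python
from json import JSONDecodeError
from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type


class Failed(Exception):
    """Stand-in for the project's non-retryable error type."""


attempts = []


@retry(stop=stop_after_attempt(6), wait=wait_fixed(0),
       retry=retry_if_not_exception_type(Failed))
def _request():
    attempts.append(1)
    try:
        raise JSONDecodeError("Expecting value", "", 0)  # simulate an invalid JSON body
    except JSONDecodeError as e:
        # Re-raising as Failed makes tenacity give up immediately.
        raise Failed("Notifiarr Error: Invalid JSON response received") from e


try:
    _request()
except Failed as error:
    print(len(attempts), error)  # 1 Notifiarr Error: Invalid JSON response received
```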
@@ -13,6 +13,7 @@ class OMDbObj:
  self._data = data
  if data["Response"] == "False":
  raise Failed(f"OMDb Error: {data['Error']} IMDb ID: {imdb_id}")

  def _parse(key, is_int=False, is_float=False, is_date=False, replace=None):
  try:
  value = str(data[key]).replace(replace, '') if replace else data[key]

@@ -26,6 +27,7 @@ class OMDbObj:
  return value
  except (ValueError, TypeError, KeyError):
  return None

  self.title = _parse("Title")
  self.year = _parse("Year", is_int=True)
  self.released = _parse("Released", is_date=True)

@@ -15,7 +15,7 @@ from plexapi.playlist import Playlist
  from plexapi.server import PlexServer
  from plexapi.video import Movie, Show, Season, Episode
  from requests.exceptions import ConnectionError, ConnectTimeout
- from retrying import retry
+ from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type
  from xml.etree.ElementTree import ParseError

  logger = util.logger

@@ -560,11 +560,11 @@ class Plex(Library):
  return []
  return self.fetchItems(args)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def search(self, title=None, sort=None, maxresults=None, libtype=None, **kwargs):
  return self.Plex.search(title=title, sort=sort, maxresults=maxresults, libtype=libtype, **kwargs)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def exact_search(self, title, libtype=None, year=None):
  terms = {"title=": title}
  if year:

@@ -585,11 +585,11 @@ class Plex(Library):
  logger.trace(e)
  raise Failed(f"Plex Error: Item {item} not found")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def fetchItem(self, data):
  return self.PlexServer.fetchItem(data)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def fetchItems(self, uri_args):
  return self.Plex.fetchItems(f"/library/sections/{self.Plex.key}/all{'' if uri_args is None else uri_args}")

@@ -633,11 +633,11 @@ class Plex(Library):
  elif filepath:
  self.PlexServer.query(key, method=self.PlexServer._session.post, data=open(filepath, 'rb').read())

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def create_playlist(self, name, items):
  return self.PlexServer.createPlaylist(name, items=items)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def moveItem(self, obj, item, after):
  try:
  obj.moveItem(item, after=after)

@@ -645,7 +645,7 @@ class Plex(Library):
  logger.error(e)
  raise Failed("Move Failed")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def query(self, method):
  return method()

@@ -656,30 +656,30 @@ class Plex(Library):
  logger.stacktrace()
  raise Failed(f"Plex Error: Failed to delete {obj.title}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def query_data(self, method, data):
  return method(data)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def tag_edit(self, item, attribute, data, locked=True, remove=False):
  return item.editTags(attribute, data, locked=locked, remove=remove)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def query_collection(self, item, collection, locked=True, add=True):
  if add:
  item.addCollection(collection, locked=locked)
  else:
  item.removeCollection(collection, locked=locked)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def collection_mode_query(self, collection, data):
  collection.modeUpdate(mode=data)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def collection_order_query(self, collection, data):
  collection.sortUpdate(sort=data)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def item_labels(self, item):
  try:
  return item.labels

@@ -766,7 +766,7 @@ class Plex(Library):
  item_list.append(item)
  return item_list

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def reload(self, item, force=False):
  is_full = False
  if not force and item.ratingKey in self.cached_items:

@@ -780,14 +780,14 @@ class Plex(Library):
  raise Failed(f"Item Failed to Load: {e}")
  return item

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def edit_query(self, item, edits, advanced=False):
  if advanced:
  item.editAdvanced(**edits)
  else:
  item.edit(**edits)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def _upload_image(self, item, image):
  try:
  if image.is_url and "theposterdb.com" in image.location:

@@ -810,21 +810,21 @@ class Plex(Library):
  item.refresh()
  raise Failed(e)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def upload_poster(self, item, image, url=False):
  if url:
  item.uploadPoster(url=image)
  else:
  item.uploadPoster(filepath=image)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def upload_background(self, item, image, url=False):
  if url:
  item.uploadArt(url=image)
  else:
  item.uploadArt(filepath=image)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_actor_id(self, name):
  results = self.Plex.hubSearch(name)
  for result in results:

@@ -851,7 +851,7 @@ class Plex(Library):
  logger.debug(f"Search Attribute: {final_search}")
  raise Failed(f"Plex Error: plex_search attribute: {search_name} not supported")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def get_tags(self, tag):
  if isinstance(tag, str):
  match = re.match(r'(?:([a-zA-Z]*)\.)?([a-zA-Z]+)', tag)

@@ -872,7 +872,7 @@ class Plex(Library):
  items = [i for i in self.Plex.findItems(self.Plex._server.query(tag.key[:-7]), FilterChoice) if i.key not in keys]
  return items

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_plex)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
  def _query(self, key, post=False, put=False):
  if post: method = self.Plex._server._session.post
  elif put: method = self.Plex._server._session.put

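For the Plex wrappers, the old `util.retry_if_not_plex` predicate is replaced by passing a tuple of exception classes to `retry_if_not_exception_type`, so plexapi errors that indicate a real problem are raised immediately while transient errors keep being retried. A minimal sketch, with stand-in exception classes in place of `plexapi.exceptions.BadRequest`, `NotFound`, and `Unauthorized`, and a placeholder `fetch_item` function:

```python
from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type


class BadRequest(Exception): ...
class NotFound(Exception): ...
class Unauthorized(Exception): ...


@retry(stop=stop_after_attempt(6), wait=wait_fixed(10),
       retry=retry_if_not_exception_type((BadRequest, NotFound, Unauthorized)))
def fetch_item(rating_key):
    ...  # timeouts and other transient errors are retried; the three types above are not
```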
@@ -4,7 +4,7 @@ from modules import util
  from modules.poster import ImageData
  from modules.util import Failed
  from requests.exceptions import ConnectionError
- from retrying import retry
+ from tenacity import retry, stop_after_attempt, wait_fixed
  from urllib import parse

  logger = util.logger

@@ -149,7 +149,7 @@ class Requests:
  logger.error(str(response.content))
  raise

- @retry(stop_max_attempt_number=6, wait_fixed=10000)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10))
  def get(self, url, json=None, headers=None, params=None, header=None, language=None):
  return self.session.get(url, json=json, headers=get_header(headers, header, language), params=params)

@@ -167,7 +167,7 @@ class Requests:
  logger.error(str(response.content))
  raise

- @retry(stop_max_attempt_number=6, wait_fixed=10000)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10))
  def post(self, url, data=None, json=None, headers=None, header=None, language=None):
  return self.session.post(url, data=data, json=json, headers=get_header(headers, header, language))

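The shared `Requests.get` and `Requests.post` wrappers keep no `retry=` predicate at all, so tenacity retries them on any exception. A sketch of the same pattern around a plain `requests.Session` (the URL and function name below are placeholders):

```python
import requests
from tenacity import retry, stop_after_attempt, wait_fixed

session = requests.Session()


@retry(stop=stop_after_attempt(6), wait=wait_fixed(10))
def get(url, **kwargs):
    # Retried on ConnectionError, timeouts, and any other exception, up to 6 attempts.
    return session.get(url, **kwargs)


# Usage: get("https://example.org/api")
```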
@@ -1,7 +1,7 @@
  import re
  from modules import util
  from modules.util import Failed
- from retrying import retry
+ from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type
  from tmdbapis import TMDbAPIs, TMDbException, NotFound, Movie

  logger = util.logger

@@ -128,7 +128,7 @@ class TMDbMovie(TMDBObj):
  if self._tmdb.cache and not ignore_cache:
  self._tmdb.cache.update_tmdb_movie(expired, self, self._tmdb.expiration)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def load_movie(self):
  try:
  return self._tmdb.TMDb.movie(self.tmdb_id, partial="external_ids,keywords")

@@ -165,7 +165,7 @@ class TMDbShow(TMDBObj):
  if self._tmdb.cache and not ignore_cache:
  self._tmdb.cache.update_tmdb_show(expired, self, self._tmdb.expiration)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def load_show(self):
  try:
  return self._tmdb.TMDb.tv_show(self.tmdb_id, partial="external_ids,keywords")

@@ -201,7 +201,7 @@ class TMDbEpisode:
  if self._tmdb.cache and not ignore_cache:
  self._tmdb.cache.update_tmdb_episode(expired, self, self._tmdb.expiration)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def load_episode(self):
  try:
  return self._tmdb.TMDb.tv_episode(self.tmdb_id, self.season_number, self.episode_number)

@@ -235,7 +235,7 @@ class TMDb:
  raise Failed(f"TMDb Error: No {convert_to.upper().replace('B_', 'b ')} found for TMDb ID {tmdb_id}")
  return check_id

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def convert_tvdb_to(self, tvdb_id):
  try:
  results = self.TMDb.find_by_id(tvdb_id=tvdb_id)

@@ -245,7 +245,7 @@ class TMDb:
  pass
  raise Failed(f"TMDb Error: No TMDb ID found for TVDb ID {tvdb_id}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def convert_imdb_to(self, imdb_id):
  try:
  results = self.TMDb.find_by_id(imdb_id=imdb_id)

@@ -274,7 +274,7 @@ class TMDb:
  def get_show(self, tmdb_id, ignore_cache=False):
  return TMDbShow(self, tmdb_id, ignore_cache=ignore_cache)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_season(self, tmdb_id, season_number, partial=None):
  try: return self.TMDb.tv_season(tmdb_id, season_number, partial=partial)
  except NotFound as e: raise Failed(f"TMDb Error: No Season found for TMDb ID {tmdb_id} Season {season_number}: {e}")

@@ -282,41 +282,41 @@ class TMDb:
  def get_episode(self, tmdb_id, season_number, episode_number, ignore_cache=False):
  return TMDbEpisode(self, tmdb_id, season_number, episode_number, ignore_cache=ignore_cache)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_collection(self, tmdb_id, partial=None):
  try: return self.TMDb.collection(tmdb_id, partial=partial)
  except NotFound as e: raise Failed(f"TMDb Error: No Collection found for TMDb ID {tmdb_id}: {e}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_person(self, tmdb_id, partial=None):
  try: return self.TMDb.person(tmdb_id, partial=partial)
  except NotFound as e: raise Failed(f"TMDb Error: No Person found for TMDb ID {tmdb_id}: {e}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def _company(self, tmdb_id, partial=None):
  try: return self.TMDb.company(tmdb_id, partial=partial)
  except NotFound as e: raise Failed(f"TMDb Error: No Company found for TMDb ID {tmdb_id}: {e}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def _network(self, tmdb_id, partial=None):
  try: return self.TMDb.network(tmdb_id, partial=partial)
  except NotFound as e: raise Failed(f"TMDb Error: No Network found for TMDb ID {tmdb_id}: {e}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def _keyword(self, tmdb_id):
  try: return self.TMDb.keyword(tmdb_id)
  except NotFound as e: raise Failed(f"TMDb Error: No Keyword found for TMDb ID {tmdb_id}: {e}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_list(self, tmdb_id):
  try: return self.TMDb.list(tmdb_id)
  except NotFound as e: raise Failed(f"TMDb Error: No List found for TMDb ID {tmdb_id}: {e}")

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_popular_people(self, limit):
  return {str(p.id): p.name for p in self.TMDb.popular_people().get_results(limit)}

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def search_people(self, name):
  try: return self.TMDb.people_search(name)
  except NotFound: raise Failed(f"TMDb Error: Actor {name} Not Found")

@@ -342,7 +342,7 @@ class TMDb:
  elif tmdb_type == "List": self.get_list(tmdb_id)
  return tmdb_id

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_items(self, method, data, region, is_movie, result_type):
  if method == "tmdb_popular":
  results = self.TMDb.popular_movies(region=region) if is_movie else self.TMDb.popular_tv()

@@ -2,7 +2,7 @@ import time, webbrowser
  from modules import util
  from modules.request import urlparse
  from modules.util import Failed, TimeoutExpired
- from retrying import retry
+ from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type

  logger = util.logger

@@ -198,7 +198,7 @@ class Trakt:
  return True
  return False

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def _request(self, url, params=None, json_data=None):
  headers = {
  "Content-Type": "application/json",

@@ -5,7 +5,7 @@ from lxml.etree import ParserError
  from modules import util
  from modules.util import Failed
  from requests.exceptions import MissingSchema
- from retrying import retry
+ from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_not_exception_type

  logger = util.logger

@@ -115,7 +115,7 @@ class TVDb:
  tvdb_id, _, _ = self.get_id_from_url(tvdb_url, is_movie=is_movie)
  return TVDbObj(self, tvdb_id, is_movie=is_movie)

- @retry(stop_max_attempt_number=6, wait_fixed=10000, retry_on_exception=util.retry_if_not_failed)
+ @retry(stop=stop_after_attempt(6), wait=wait_fixed(10), retry=retry_if_not_exception_type(Failed))
  def get_request(self, tvdb_url):
  response = self.requests.get(tvdb_url, language=self.language)
  if response.status_code >= 400:

@@ -4,8 +4,10 @@ from modules.logs import MyLogger
  from num2words import num2words
  from pathvalidate import is_valid_filename, sanitize_filename
  from plexapi.audio import Album, Track
- from plexapi.exceptions import BadRequest, NotFound, Unauthorized
  from plexapi.video import Season, Episode, Movie
+ from requests.exceptions import HTTPError
+ from tenacity import retry_if_exception
+ from tenacity.wait import wait_base

  try:
  import msvcrt

@@ -43,11 +45,32 @@ class NotScheduled(Exception):
  class NotScheduledRange(NotScheduled):
  pass

- def retry_if_not_failed(exception):
- return not isinstance(exception, Failed)

- def retry_if_not_plex(exception):
- return not isinstance(exception, (BadRequest, NotFound, Unauthorized, Failed))
+ class retry_if_http_429_error(retry_if_exception):
+ def __init__(self):
+ def is_http_429_error(exception: BaseException) -> bool:
+ return isinstance(exception, HTTPError) and exception.response.status_code == 429
+
+ super().__init__(predicate=is_http_429_error)
+
+
+ class wait_for_retry_after_header(wait_base):
+ def __init__(self, fallback):
+ self.fallback = fallback
+
+ def __call__(self, retry_state):
+ exc = retry_state.outcome.exception()
+ if isinstance(exc, HTTPError):
+ retry_after = exc.response.headers.get("Retry-After", None)
+ try:
+ if retry_after is not None:
+ return int(retry_after)
+ except (TypeError, ValueError):
+ pass
+
+ return self.fallback(retry_state)

  days_alias = {
  "monday": 0, "mon": 0, "m": 0,

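The two helpers added to the util module hook into tenacity's extension points: `retry_if_exception` takes a predicate, and a `wait_base` subclass can compute the delay from the failed attempt. The hunks above do not show them being applied; a plausible way to wire them up is sketched below, where the `get_json` function and its URL are illustrative only:

```python
import requests
from requests.exceptions import HTTPError
from tenacity import retry, stop_after_attempt, wait_fixed, retry_if_exception
from tenacity.wait import wait_base


class retry_if_http_429_error(retry_if_exception):
    """Retry only when the raised exception is an HTTP 429 (Too Many Requests)."""
    def __init__(self):
        def is_http_429_error(exception: BaseException) -> bool:
            return isinstance(exception, HTTPError) and exception.response.status_code == 429
        super().__init__(predicate=is_http_429_error)


class wait_for_retry_after_header(wait_base):
    """Sleep for the server's Retry-After value, falling back to another wait strategy."""
    def __init__(self, fallback):
        self.fallback = fallback

    def __call__(self, retry_state):
        exc = retry_state.outcome.exception()
        if isinstance(exc, HTTPError):
            retry_after = exc.response.headers.get("Retry-After", None)
            try:
                if retry_after is not None:
                    return int(retry_after)
            except (TypeError, ValueError):
                pass
        return self.fallback(retry_state)


@retry(retry=retry_if_http_429_error(),
       wait=wait_for_retry_after_header(fallback=wait_fixed(10)),
       stop=stop_after_attempt(6))
def get_json(url):
    response = requests.get(url)
    response.raise_for_status()  # raises HTTPError on 429, which triggers the retry above
    return response.json()
```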
@@ -9,7 +9,7 @@ psutil==5.9.8
  python-dotenv==1.0.1
  python-dateutil==2.9.0.post0
  requests==2.32.3
- retrying==1.3.4
+ tenacity==8.3.0
  ruamel.yaml==0.18.6
  schedule==1.2.2
  setuptools==70.0.0
|
Loading…
Reference in a new issue