Compare commits

..

No commits in common. "main" and "v7.0.0" have entirely different histories.
main ... v7.0.0

20 changed files with 874 additions and 1621 deletions

117
.env
View File

@ -1,117 +0,0 @@
# Global Settings
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "False"
## Debugging level, "info" is default, "debug" is more verbose
#DEBUG_LEVEL = "DEBUG"
DEBUG_LEVEL = "INFO"
## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"
## How often to run the script in seconds
SLEEP_DURATION = "60"
## Log file where all output will be written to
LOG_FILE = "/mnt/log.log"
## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "/mnt/mark.log"
## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300
## Max threads for processing
MAX_THREADS = 1
## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files then you can enable only generate locations
## If media servers are using different files then you can enable only generate guids
## Default is to generate both
GENERATE_GUIDS = "True"
GENERATE_LOCATIONS = "True"
## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
# jellyfin: plex,plex
#USER_MAPPING = { "belandbroc": "debila,belan49", "debila,belan49": "belandbroc", "debila": "belandbroc", "belan49": "belandbroc" }
USER_MAPPING = { "belandbroc":"debila", "debila":"belandbroc", "debila":"belandbroc" }
## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "TV Shows": "Scratch TV Shows", "Scratch TV Shows": "TV Shows" }
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
WHITELIST_LIBRARY = "TV Shows,Scratch TV Shows,Movies"
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "belandbroc,debila"
# Plex
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://192.168.0.86:32400"
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
# PLEX_TOKEN = "vPGyuy6zWVCz6ZFyy8x1"
# # debila=debilapointe@gmail
PLEX_TOKEN = "S7gbVzAzH4ypN-4K7ta5"
# me
## If not using plex token then use username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "squeaky2x3@gmail.com"
#PLEX_PASSWORD = "qoDuGNsGsWRurOd5QFdRy2@"
#PLEX_SERVERNAME = "Scratch"
## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"
# Jellyfin
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "https://jellyfin.home.blapointe.com"
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "1dc766ce6ca44c53b773263a06889b96"
# # Emby
#
# ## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
# ## Comma seperated list for multiple servers
# EMBY_BASEURL = "http://localhost:8097"
#
# ## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
# ## Comma seperated list for multiple servers
# EMBY_TOKEN = "SuperSecretToken"
# Syncing Options
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "False"
#SYNC_FROM_PLEX_TO_EMBY = "False"
SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
#SYNC_FROM_JELLYFIN_TO_EMBY = "False"
#SYNC_FROM_EMBY_TO_PLEX = "False"
#SYNC_FROM_EMBY_TO_JELLYFIN = "False"
#SYNC_FROM_EMBY_TO_EMBY = "False"

View File

@ -3,8 +3,11 @@
## Do not mark any shows/movies as played and instead just output to log if they would of been marked. ## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True" DRYRUN = "True"
## Additional logging information
DEBUG = "False"
## Debugging level, "info" is default, "debug" is more verbose ## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "INFO" DEBUG_LEVEL = "info"
## If set to true then the script will only run once and then exit ## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "False" RUN_ONLY_ONCE = "False"
@ -13,7 +16,7 @@ RUN_ONLY_ONCE = "False"
SLEEP_DURATION = "3600" SLEEP_DURATION = "3600"
## Log file where all output will be written to ## Log file where all output will be written to
LOG_FILE = "log.log" LOGFILE = "log.log"
## Mark file where all shows/movies that have been marked as played will be written to ## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log" MARK_FILE = "mark.log"
@ -21,24 +24,26 @@ MARK_FILE = "mark.log"
## Timeout for requests for jellyfin ## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300 REQUEST_TIMEOUT = 300
## Max threads for processing ## Generate guids
MAX_THREADS = 1 ## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using same files on multiple servers
## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files then you can enable only generate locations
## If media servers are using different files then you can enable only generate guids
## Default is to generate both
GENERATE_GUIDS = "True" GENERATE_GUIDS = "True"
## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True" GENERATE_LOCATIONS = "True"
## Max threads for processing
MAX_THREADS = 2
## Map usernames between servers in the event that they are different, order does not matter ## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options ## Comma separated for multiple options
USER_MAPPING = { "Username": "User", "Second User": "User Dos" } #USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }
## Map libraries between servers in the event that they are different, order does not matter ## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options ## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" } #LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded. ## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options ## Comma separated for multiple options
@ -47,7 +52,7 @@ LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
#BLACKLIST_LIBRARY_TYPE = "" #BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = "" #WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = "" #BLACKLIST_USERS = ""
#WHITELIST_USERS = "" WHITELIST_USERS = "testuser1,testuser2"
# Plex # Plex
@ -91,7 +96,7 @@ EMBY_BASEURL = "http://localhost:8097"
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key ## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma seperated list for multiple servers ## Comma seperated list for multiple servers
EMBY_TOKEN = "SuperSecretToken" EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
# Syncing Options # Syncing Options

View File

@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install uv - name: Install uv
uses: astral-sh/setup-uv@v6 uses: astral-sh/setup-uv@v5
- name: "Set up Python" - name: "Set up Python"
uses: actions/setup-python@v5 uses: actions/setup-python@v5
@ -28,7 +28,7 @@ jobs:
python-version-file: ".python-version" python-version-file: ".python-version"
- name: "Install dependencies" - name: "Install dependencies"
run: uv sync --frozen run: uv sync --all-extras --dev
- name: "Run tests" - name: "Run tests"
run: uv run pytest -vvv run: uv run pytest -vvv
@ -39,7 +39,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install uv - name: Install uv
uses: astral-sh/setup-uv@v6 uses: astral-sh/setup-uv@v5
- name: "Set up Python" - name: "Set up Python"
uses: actions/setup-python@v5 uses: actions/setup-python@v5
@ -48,7 +48,7 @@ jobs:
- name: "Install dependencies" - name: "Install dependencies"
run: | run: |
uv sync --frozen uv sync --all-extras --dev
sudo apt update && sudo apt install -y docker-compose sudo apt update && sudo apt install -y docker-compose
- name: "Checkout JellyPlex-Watched-CI" - name: "Checkout JellyPlex-Watched-CI"
@ -70,35 +70,40 @@ jobs:
- name: "Test Plex" - name: "Test Plex"
run: | run: |
ENV_FILE="test/ci_plex.env" uv run main.py mv test/ci_plex.env .env
uv run main.py
uv run test/validate_ci_marklog.py --plex uv run test/validate_ci_marklog.py --plex
rm mark.log rm mark.log
- name: "Test Jellyfin" - name: "Test Jellyfin"
run: | run: |
ENV_FILE="test/ci_jellyfin.env" uv run main.py mv test/ci_jellyfin.env .env
uv run main.py
uv run test/validate_ci_marklog.py --jellyfin uv run test/validate_ci_marklog.py --jellyfin
rm mark.log rm mark.log
- name: "Test Emby" - name: "Test Emby"
run: | run: |
ENV_FILE="test/ci_emby.env" uv run main.py mv test/ci_emby.env .env
uv run main.py
uv run test/validate_ci_marklog.py --emby uv run test/validate_ci_marklog.py --emby
rm mark.log rm mark.log
- name: "Test Guids" - name: "Test Guids"
run: | run: |
ENV_FILE="test/ci_guids.env" uv run main.py mv test/ci_guids.env .env
uv run main.py
uv run test/validate_ci_marklog.py --guids uv run test/validate_ci_marklog.py --guids
rm mark.log rm mark.log
- name: "Test Locations" - name: "Test Locations"
run: | run: |
ENV_FILE="test/ci_locations.env" uv run main.py mv test/ci_locations.env .env
uv run main.py
uv run test/validate_ci_marklog.py --locations uv run test/validate_ci_marklog.py --locations
rm mark.log rm mark.log
@ -106,10 +111,11 @@ jobs:
- name: "Test writing to the servers" - name: "Test writing to the servers"
run: | run: |
# Test writing to the servers # Test writing to the servers
ENV_FILE="test/ci_write.env" uv run main.py mv test/ci_write.env .env
uv run main.py
# Test again to test if it can handle existing data # Test again to test if it can handle existing data
ENV_FILE="test/ci_write.env" uv run main.py uv run main.py
uv run test/validate_ci_marklog.py --write uv run test/validate_ci_marklog.py --write
@ -172,7 +178,6 @@ jobs:
env: env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
if: "${{ env.DOCKER_USERNAME != '' }}" if: "${{ env.DOCKER_USERNAME != '' }}"
id: docker_login
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
@ -186,14 +191,26 @@ jobs:
username: ${{ github.actor }} username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Build Push - name: Build
id: build_push id: build
uses: docker/build-push-action@v6 if: "${{ steps.docker_meta.outputs.tags == '' }}"
uses: docker/build-push-action@v5
with: with:
context: . context: .
file: ${{ matrix.dockerfile }} file: ${{ matrix.dockerfile }}
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: ${{ steps.docker_login.outcome == 'success' }} push: false
tags: jellyplex-watched:action
- name: Build Push
id: build_push
if: "${{ steps.docker_meta.outputs.tags != '' }}"
uses: docker/build-push-action@v5
with:
context: .
file: ${{ matrix.dockerfile }}
platforms: linux/amd64,linux/arm64
push: true
tags: ${{ steps.docker_meta.outputs.tags }} tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }} labels: ${{ steps.docker_meta.outputs.labels }}

View File

@ -19,7 +19,6 @@ Keep in sync all your users watched history between jellyfin, plex and emby serv
- \[x] One way/multi way sync - \[x] One way/multi way sync
- \[x] Sync watched - \[x] Sync watched
- \[x] Sync in progress - \[x] Sync in progress
- \[ ] Sync view dates
### Jellyfin ### Jellyfin
@ -30,8 +29,6 @@ Keep in sync all your users watched history between jellyfin, plex and emby serv
- \[x] One way/multi way sync - \[x] One way/multi way sync
- \[x] Sync watched - \[x] Sync watched
- \[x] Sync in progress - \[x] Sync in progress
- \[x] Sync view dates
### Emby ### Emby
@ -42,8 +39,6 @@ Keep in sync all your users watched history between jellyfin, plex and emby serv
- \[x] One way/multi way sync - \[x] One way/multi way sync
- \[x] Sync watched - \[x] Sync watched
- \[x] Sync in progress - \[x] Sync in progress
- \[x] Sync view dates
## Configuration ## Configuration
@ -53,18 +48,20 @@ Full list of configuration options can be found in the [.env.sample](.env.sample
### Baremetal ### Baremetal
- [Install uv](https://docs.astral.sh/uv/getting-started/installation/) - Setup virtualenv of your choice
- Create a .env file similar to .env.sample; fill in baseurls and tokens, **remember to uncomment anything you wish to use** (e.g., user mapping, library mapping, black/whitelist, etc.). If you want to store your .env file anywhere else or under a different name you can use ENV_FILE variable to specify the location. - Install dependencies
```bash
pip install -r requirements.txt
```
- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens
- Run - Run
```bash ```bash
uv run main.py python main.py
```
```bash
ENV_FILE="Test.env" uv run main.py
``` ```
### Docker ### Docker
@ -107,7 +104,6 @@ Full list of configuration options can be found in the [.env.sample](.env.sample
- Configuration - Configuration
- Do not use quotes around variables in docker compose - Do not use quotes around variables in docker compose
- If you are not running all 3 supported servers, that is, Plex, Jellyfin, and Emby simultaneously, make sure to comment out the server url and token of the server you aren't using.
## Contributing ## Contributing

0
entrypoint.sh Executable file → Normal file
View File

View File

@ -1,24 +1,22 @@
[project] [project]
name = "jellyplex-watched" name = "jellyplex-watched"
version = "8.3.0" version = "6.1.2"
description = "Sync watched between media servers locally" description = "Sync watched between media servers locally"
readme = "README.md" readme = "README.md"
requires-python = ">=3.12" requires-python = ">=3.12"
dependencies = [ dependencies = [
"loguru>=0.7.3", "loguru>=0.7.3",
"packaging==25.0", "packaging==24.2",
"plexapi==4.17.1", "plexapi==4.16.1",
"pydantic==2.11.7", "pydantic==2.10.6",
"python-dotenv==1.1.1", "python-dotenv==1.0.0",
"requests==2.32.5", "requests==2.32.3",
] ]
[dependency-groups] [dependency-groups]
lint = [ lint = [
"ruff>=0.12.3", "ruff>=0.9.6",
] ]
dev = [ dev = [
"mypy>=1.16.1", "pytest>=8.3.4",
"pytest>=8.4.1",
"types-requests>=2.32.0.20250611",
] ]

10
run.sh
View File

@ -1,10 +0,0 @@
#! /usr/bin/env bash
d=/tmp/jellyplex.d
mkdir -p $d
docker run --rm -it -v "$d":/mnt $(
if [ "${PWD##*/}" == JellyPlex-Watched ]; then
echo "-v $PWD/src:/app/src"
fi
) -v $PWD/.env:/app/.env \
luigi311/jellyplex-watched:latest

View File

@ -12,7 +12,7 @@ def setup_black_white_lists(
whitelist_users: list[str] | None, whitelist_users: list[str] | None,
library_mapping: dict[str, str] | None = None, library_mapping: dict[str, str] | None = None,
user_mapping: dict[str, str] | None = None, user_mapping: dict[str, str] | None = None,
) -> tuple[list[str], list[str], list[str], list[str], list[str], list[str]]: ):
blacklist_library, blacklist_library_type, blacklist_users = setup_x_lists( blacklist_library, blacklist_library_type, blacklist_users = setup_x_lists(
blacklist_library, blacklist_library,
blacklist_library_type, blacklist_library_type,

View File

@ -1,17 +1,18 @@
import os
from typing import Literal from typing import Literal
from dotenv import load_dotenv
from loguru import logger from loguru import logger
from src.functions import str_to_bool, get_env_value from src.functions import str_to_bool
from src.plex import Plex from src.plex import Plex
from src.jellyfin import Jellyfin from src.jellyfin import Jellyfin
from src.emby import Emby from src.emby import Emby
load_dotenv(override=True)
def jellyfin_emby_server_connection( def jellyfin_emby_server_connection(
env, server_baseurl: str, server_token: str, server_type: Literal["jellyfin", "emby"]
server_baseurl: str,
server_token: str,
server_type: Literal["jellyfin", "emby"],
) -> list[Jellyfin | Emby]: ) -> list[Jellyfin | Emby]:
servers: list[Jellyfin | Emby] = [] servers: list[Jellyfin | Emby] = []
server: Jellyfin | Emby server: Jellyfin | Emby
@ -24,19 +25,17 @@ def jellyfin_emby_server_connection(
f"{server_type.upper()}_BASEURL and {server_type.upper()}_TOKEN must have the same number of entries" f"{server_type.upper()}_BASEURL and {server_type.upper()}_TOKEN must have the same number of entries"
) )
for i, base_url in enumerate(server_baseurls): for i, baseurl in enumerate(server_baseurls):
base_url = base_url.strip() baseurl = baseurl.strip()
if base_url[-1] == "/": if baseurl[-1] == "/":
base_url = base_url[:-1] baseurl = baseurl[:-1]
if server_type == "jellyfin": if server_type == "jellyfin":
server = Jellyfin( server = Jellyfin(baseurl=baseurl, token=server_tokens[i].strip())
env=env, base_url=base_url, token=server_tokens[i].strip()
)
servers.append(server) servers.append(server)
elif server_type == "emby": elif server_type == "emby":
server = Emby(env=env, base_url=base_url, token=server_tokens[i].strip()) server = Emby(baseurl=baseurl, token=server_tokens[i].strip())
servers.append(server) servers.append(server)
else: else:
raise Exception("Unknown server type") raise Exception("Unknown server type")
@ -46,17 +45,16 @@ def jellyfin_emby_server_connection(
return servers return servers
def generate_server_connections(env) -> list[Plex | Jellyfin | Emby]: def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
servers: list[Plex | Jellyfin | Emby] = [] servers: list[Plex | Jellyfin | Emby] = []
plex_baseurl_str: str | None = get_env_value(env, "PLEX_BASEURL", None) plex_baseurl_str: str | None = os.getenv("PLEX_BASEURL", None)
plex_token_str: str | None = get_env_value(env, "PLEX_TOKEN", None) plex_token_str: str | None = os.getenv("PLEX_TOKEN", None)
plex_username_str: str | None = get_env_value(env, "PLEX_USERNAME", None) plex_username_str: str | None = os.getenv("PLEX_USERNAME", None)
plex_password_str: str | None = get_env_value(env, "PLEX_PASSWORD", None) plex_password_str: str | None = os.getenv("PLEX_PASSWORD", None)
plex_servername_str: str | None = get_env_value(env, "PLEX_SERVERNAME", None) plex_servername_str: str | None = os.getenv("PLEX_SERVERNAME", None)
ssl_bypass = str_to_bool(get_env_value(env, "SSL_BYPASS", "False")) ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))
print(f"if plex_baseurl_str={plex_baseurl_str} and plex_token_str={plex_token_str}")
if plex_baseurl_str and plex_token_str: if plex_baseurl_str and plex_token_str:
plex_baseurl = plex_baseurl_str.split(",") plex_baseurl = plex_baseurl_str.split(",")
plex_token = plex_token_str.split(",") plex_token = plex_token_str.split(",")
@ -67,14 +65,12 @@ def generate_server_connections(env) -> list[Plex | Jellyfin | Emby]:
) )
for i, url in enumerate(plex_baseurl): for i, url in enumerate(plex_baseurl):
print(f"Plex({url.strip()}, {plex_token[i].strip()})")
server = Plex( server = Plex(
env, baseurl=url.strip(),
base_url=url.strip(),
token=plex_token[i].strip(), token=plex_token[i].strip(),
user_name=None, username=None,
password=None, password=None,
server_name=None, servername=None,
ssl_bypass=ssl_bypass, ssl_bypass=ssl_bypass,
) )
@ -96,32 +92,31 @@ def generate_server_connections(env) -> list[Plex | Jellyfin | Emby]:
for i, username in enumerate(plex_username): for i, username in enumerate(plex_username):
server = Plex( server = Plex(
env, baseurl=None,
base_url=None,
token=None, token=None,
user_name=username.strip(), username=username.strip(),
password=plex_password[i].strip(), password=plex_password[i].strip(),
server_name=plex_servername[i].strip(), servername=plex_servername[i].strip(),
ssl_bypass=ssl_bypass, ssl_bypass=ssl_bypass,
) )
logger.debug(f"Plex Server {i} info: {server.info()}") logger.debug(f"Plex Server {i} info: {server.info()}")
servers.append(server) servers.append(server)
jellyfin_baseurl = get_env_value(env, "JELLYFIN_BASEURL", None) jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
jellyfin_token = get_env_value(env, "JELLYFIN_TOKEN", None) jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)
if jellyfin_baseurl and jellyfin_token: if jellyfin_baseurl and jellyfin_token:
servers.extend( servers.extend(
jellyfin_emby_server_connection( jellyfin_emby_server_connection(
env, jellyfin_baseurl, jellyfin_token, "jellyfin" jellyfin_baseurl, jellyfin_token, "jellyfin"
) )
) )
emby_baseurl = get_env_value(env, "EMBY_BASEURL", None) emby_baseurl = os.getenv("EMBY_BASEURL", None)
emby_token = get_env_value(env, "EMBY_TOKEN", None) emby_token = os.getenv("EMBY_TOKEN", None)
if emby_baseurl and emby_token: if emby_baseurl and emby_token:
servers.extend( servers.extend(
jellyfin_emby_server_connection(env, emby_baseurl, emby_token, "emby") jellyfin_emby_server_connection(emby_baseurl, emby_token, "emby")
) )
return servers return servers

View File

@ -1,10 +1,9 @@
from src.jellyfin_emby import JellyfinEmby from src.jellyfin_emby import JellyfinEmby
from packaging.version import parse, Version from packaging.version import parse, Version
from loguru import logger
class Emby(JellyfinEmby): class Emby(JellyfinEmby):
def __init__(self, env, base_url: str, token: str) -> None: def __init__(self, baseurl, token):
authorization = ( authorization = (
"Emby , " "Emby , "
'Client="JellyPlex-Watched", ' 'Client="JellyPlex-Watched", '
@ -19,14 +18,8 @@ class Emby(JellyfinEmby):
} }
super().__init__( super().__init__(
env, server_type="Emby", base_url=base_url, token=token, headers=headers server_type="Emby", baseurl=baseurl, token=token, headers=headers
) )
def is_partial_update_supported(self, server_version: Version) -> bool: def is_partial_update_supported(self, server_version: Version) -> bool:
if not server_version >= parse("4.4"): return server_version > parse("4.4")
logger.info(
f"{self.server_type}: Server version {server_version} does not support updating playback position.",
)
return False
return True

View File

@ -2,11 +2,11 @@ import os
from concurrent.futures import Future, ThreadPoolExecutor from concurrent.futures import Future, ThreadPoolExecutor
from typing import Any, Callable from typing import Any, Callable
from dotenv import load_dotenv from dotenv import load_dotenv
import re
from pathlib import PureWindowsPath, PurePosixPath
load_dotenv(override=True) load_dotenv(override=True)
mark_file = os.getenv("MARK_FILE", os.getenv("MARKFILE", "mark.log"))
def log_marked( def log_marked(
server_type: str, server_type: str,
@ -16,8 +16,7 @@ def log_marked(
movie_show: str, movie_show: str,
episode: str | None = None, episode: str | None = None,
duration: float | None = None, duration: float | None = None,
mark_file: str = "mark.log", ):
) -> None:
output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}" output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}"
if episode: if episode:
@ -26,22 +25,13 @@ def log_marked(
if duration: if duration:
output += f"/{duration}" output += f"/{duration}"
with open(mark_file, "a", encoding="utf-8") as file: with open(f"{mark_file}", "a", encoding="utf-8") as file:
file.write(output + "\n") file.write(output + "\n")
def get_env_value(env, key: str, default: Any = None):
if env and key in env:
return env[key]
elif os.getenv(key):
return os.getenv(key)
else:
return default
# Reimplementation of distutils.util.strtobool due to it being deprecated # Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668 # Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: str | None) -> bool: def str_to_bool(value: str) -> bool:
if not value: if not value:
return False return False
return str(value).lower() in ("y", "yes", "t", "true", "on", "1") return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
@ -83,13 +73,13 @@ def future_thread_executor(
args: list[tuple[Callable[..., Any], ...]], args: list[tuple[Callable[..., Any], ...]],
threads: int | None = None, threads: int | None = None,
override_threads: bool = False, override_threads: bool = False,
max_threads: int | None = None,
) -> list[Any]: ) -> list[Any]:
results: list[Any] = [] results: list[Any] = []
# Determine the number of workers, defaulting to 1 if os.cpu_count() returns None # Determine the number of workers, defaulting to 1 if os.cpu_count() returns None
max_threads_env: int = int(os.getenv("MAX_THREADS", 32))
cpu_threads: int = os.cpu_count() or 1 # Default to 1 if os.cpu_count() is None cpu_threads: int = os.cpu_count() or 1 # Default to 1 if os.cpu_count() is None
workers: int = min(max_threads, cpu_threads * 2) if max_threads else cpu_threads * 2 workers: int = min(max_threads_env, cpu_threads * 2)
# Adjust workers based on threads parameter and override_threads flag # Adjust workers based on threads parameter and override_threads flag
if threads is not None: if threads is not None:
@ -126,13 +116,3 @@ def parse_string_to_list(string: str | None) -> list[str]:
output = string.split(",") output = string.split(",")
return output return output
_WINDOWS_DRIVE = re.compile(r"^[A-Za-z]:") # e.g. C: D:
def filename_from_any_path(p: str) -> str:
# Windows-y if UNC (\\server\share), drive letter, or has backslashes
if p.startswith("\\\\") or _WINDOWS_DRIVE.match(p) or ("\\" in p and "/" not in p):
return PureWindowsPath(p).name
return PurePosixPath(p).name

View File

@ -1,10 +1,9 @@
from src.jellyfin_emby import JellyfinEmby from src.jellyfin_emby import JellyfinEmby
from packaging.version import parse, Version from packaging.version import parse, Version
from loguru import logger
class Jellyfin(JellyfinEmby): class Jellyfin(JellyfinEmby):
def __init__(self, env, base_url: str, token: str) -> None: def __init__(self, baseurl, token):
authorization = ( authorization = (
"MediaBrowser , " "MediaBrowser , "
'Client="JellyPlex-Watched", ' 'Client="JellyPlex-Watched", '
@ -19,14 +18,8 @@ class Jellyfin(JellyfinEmby):
} }
super().__init__( super().__init__(
env, server_type="Jellyfin", base_url=base_url, token=token, headers=headers server_type="Jellyfin", baseurl=baseurl, token=token, headers=headers
) )
def is_partial_update_supported(self, server_version: Version) -> bool: def is_partial_update_supported(self, server_version: Version) -> bool:
if not server_version >= parse("10.9.0"): return server_version >= parse("10.9.0")
logger.info(
f"{self.server_type}: Server version {server_version} does not support updating playback position.",
)
return False
return True

View File

@ -1,19 +1,18 @@
# Functions for Jellyfin and Emby # Functions for Jellyfin and Emby
from datetime import datetime
import requests import requests
import traceback import traceback
import os
from math import floor from math import floor
from typing import Any, Literal from typing import Any, Literal
from dotenv import load_dotenv
from packaging.version import parse, Version from packaging.version import parse, Version
from loguru import logger from loguru import logger
from src.functions import ( from src.functions import (
filename_from_any_path,
search_mapping, search_mapping,
log_marked, log_marked,
str_to_bool, str_to_bool,
get_env_value,
) )
from src.watched import ( from src.watched import (
LibraryData, LibraryData,
@ -25,76 +24,54 @@ from src.watched import (
check_same_identifiers, check_same_identifiers,
) )
load_dotenv(override=True)
def extract_identifiers_from_item( generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
server_type: str, generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
item: dict[str, Any],
generate_guids: bool,
generate_locations: bool, def extract_identifiers_from_item(server_type, item: dict) -> MediaIdentifiers:
) -> MediaIdentifiers: title = item.get("Name", None)
title = item.get("Name")
id = None id = None
if not title: if not title:
id = item.get("Id") id = item.get("Id")
logger.debug(f"{server_type}: Name not found for {id}") logger.info(f"{server_type}: Name not found in {id}")
guids = {} guids = {}
if generate_guids: if generate_guids:
guids = {k.lower(): v for k, v in item.get("ProviderIds", {}).items()} guids = {k.lower(): v for k, v in item["ProviderIds"].items()}
locations: tuple[str, ...] = tuple()
full_path: str = ""
if generate_locations:
if item.get("Path"):
full_path = item["Path"]
locations = tuple([filename_from_any_path(full_path)])
elif item.get("MediaSources"):
full_paths = [x["Path"] for x in item["MediaSources"] if x.get("Path")]
locations = tuple([filename_from_any_path(x) for x in full_paths])
full_path = " ".join(full_paths)
if generate_guids:
if not guids: if not guids:
logger.debug( logger.info(
f"{server_type}: {title if title else id} has no guids{f', locations: {full_path}' if full_path else ''}", f"{server_type}: {title if title else id} has no guids",
) )
locations = tuple()
if generate_locations: if generate_locations:
if not locations: if "Path" in item:
logger.debug( locations = tuple([item.get("Path").split("/")[-1]])
f"{server_type}: {title if title else id} has no locations{f', guids: {guids}' if guids else ''}", elif "MediaSources" in item:
locations = tuple(
[x["Path"].split("/")[-1] for x in item["MediaSources"] if "Path" in x]
) )
if not locations:
logger.info(f"{server_type}: {title if title else id} has no locations")
return MediaIdentifiers( return MediaIdentifiers(
title=title, title=title,
locations=locations, locations=locations,
imdb_id=guids.get("imdb"), imdb_id=guids.get("imdb", None),
tvdb_id=guids.get("tvdb"), tvdb_id=guids.get("tvdb", None),
tmdb_id=guids.get("tmdb"), tmdb_id=guids.get("tmdb", None),
) )
def get_mediaitem( def get_mediaitem(server_type, item: dict) -> MediaItem:
server_type: str,
item: dict[str, Any],
generate_guids: bool,
generate_locations: bool,
) -> MediaItem:
user_data = item.get("UserData", {})
last_played_date = user_data.get("LastPlayedDate")
viewed_date = datetime.today()
if last_played_date:
viewed_date = datetime.fromisoformat(last_played_date.replace("Z", "+00:00"))
return MediaItem( return MediaItem(
identifiers=extract_identifiers_from_item( identifiers=extract_identifiers_from_item(server_type, item),
server_type, item, generate_guids, generate_locations
),
status=WatchedStatus( status=WatchedStatus(
completed=user_data.get("Played"), completed=item["UserData"]["Played"],
time=floor(user_data.get("PlaybackPositionTicks", 0) / 10000), time=floor(item["UserData"]["PlaybackPositionTicks"] / 10000),
viewed_date=viewed_date,
), ),
) )
@ -102,41 +79,28 @@ def get_mediaitem(
class JellyfinEmby:
    """Shared API client base for Jellyfin and Emby servers.

    Validates connection settings, opens a requests.Session, and caches
    users, server name/version and feature/config flags on construction.
    """

    def __init__(
        self,
        env,
        server_type: Literal["Jellyfin", "Emby"],
        base_url: str,
        token: str,
        headers: dict[str, str],
    ) -> None:
        self.env = env

        if server_type not in ["Jellyfin", "Emby"]:
            raise Exception(f"Server type {server_type} not supported")

        self.server_type: str = server_type
        self.base_url: str = base_url
        self.token: str = token
        self.headers: dict[str, str] = headers
        # Per-request timeout in seconds; REQUEST_TIMEOUT env var, default 300.
        self.timeout: int = int(get_env_value(self.env, "REQUEST_TIMEOUT", 300))

        if not self.base_url:
            raise Exception(f"{self.server_type} base_url not set")
        if not self.token:
            raise Exception(f"{self.server_type} token not set")

        self.session = requests.Session()
        # NOTE: these perform network requests against the server.
        self.users: dict[str, str] = self.get_users()
        self.server_name: str = self.info(name_only=True)
        self.server_version: Version = self.info(version_only=True)
        # Whether this server version supports partial playback-position updates.
        self.update_partial: bool = self.is_partial_update_supported(
            self.server_version
        )
        self.generate_guids: bool = str_to_bool(
            get_env_value(self.env, "GENERATE_GUIDS", "True")
        )
        self.generate_locations: bool = str_to_bool(
            get_env_value(self.env, "GENERATE_LOCATIONS", "True")
        )
def query( def query(
self, self,
@ -144,13 +108,15 @@ class JellyfinEmby:
query_type: Literal["get", "post"], query_type: Literal["get", "post"],
identifiers: dict[str, str] | None = None, identifiers: dict[str, str] | None = None,
json: dict[str, float] | None = None, json: dict[str, float] | None = None,
) -> list[dict[str, Any]] | dict[str, Any] | None: ) -> dict[str, Any] | list[dict[str, Any]] | None:
try: try:
results = None results: (
dict[str, list[Any] | dict[str, str]] | list[dict[str, Any]] | None
) = None
if query_type == "get": if query_type == "get":
response = self.session.get( response = self.session.get(
self.base_url + query, headers=self.headers, timeout=self.timeout self.baseurl + query, headers=self.headers, timeout=self.timeout
) )
if response.status_code not in [200, 204]: if response.status_code not in [200, 204]:
raise Exception( raise Exception(
@ -163,7 +129,7 @@ class JellyfinEmby:
elif query_type == "post": elif query_type == "post":
response = self.session.post( response = self.session.post(
self.base_url + query, self.baseurl + query,
headers=self.headers, headers=self.headers,
json=json, json=json,
timeout=self.timeout, timeout=self.timeout,
@ -177,12 +143,12 @@ class JellyfinEmby:
else: else:
results = response.json() results = response.json()
if results: if results is not None:
if not isinstance(results, list) and not isinstance(results, dict): if not isinstance(results, list) and not isinstance(results, dict):
raise Exception("Query result is not of type list or dict") raise Exception("Query result is not of type list or dict")
# append identifiers to results # append identifiers to results
if identifiers and isinstance(results, dict): if identifiers and results:
results["Identifiers"] = identifiers results["Identifiers"] = identifiers
return results return results
@ -199,13 +165,13 @@ class JellyfinEmby:
try: try:
query_string = "/System/Info/Public" query_string = "/System/Info/Public"
response = self.query(query_string, "get") response: dict[str, Any] = self.query(query_string, "get")
if response and isinstance(response, dict): if response:
if name_only: if name_only:
return response.get("ServerName") return response["ServerName"]
elif version_only: elif version_only:
return parse(response.get("Version", "")) return parse(response["Version"])
return f"{self.server_type} {response.get('ServerName')}: {response.get('Version')}" return f"{self.server_type} {response.get('ServerName')}: {response.get('Version')}"
else: else:
@ -220,10 +186,12 @@ class JellyfinEmby:
users: dict[str, str] = {} users: dict[str, str] = {}
query_string = "/Users" query_string = "/Users"
response = self.query(query_string, "get") response: list[dict[str, str | bool]] = self.query(query_string, "get")
if response and isinstance(response, list): # If response is not empty
if response:
for user in response: for user in response:
if isinstance(user["Name"], str) and isinstance(user["Id"], str):
users[user["Name"]] = user["Id"] users[user["Name"]] = user["Id"]
return users return users
@ -233,61 +201,19 @@ class JellyfinEmby:
def get_libraries(self) -> dict[str, str]: def get_libraries(self) -> dict[str, str]:
try: try:
libraries: dict[str, str] = {} libraries = {}
# Theres no way to get all libraries so individually get list of libraries from all users # Theres no way to get all libraries so individually get list of libraries from all users
users = self.get_users() users = self.get_users()
for user_name, user_id in users.items(): for user_name, user_id in users.items():
user_libraries = self.query(f"/Users/{user_id}/Views", "get") user_libraries: dict = self.query(f"/Users/{user_id}/Views", "get")
logger.debug(f"{self.server_type}: All Libraries for {user_name} {[library.get("Name") for library in user_libraries["Items"]]}")
if not user_libraries or not isinstance(user_libraries, dict): for library in user_libraries["Items"]:
logger.error( library_title = library["Name"]
f"{self.server_type}: Failed to get libraries for {user_name}"
)
return libraries
logger.debug(
f"{self.server_type}: All Libraries for {user_name} {[library.get('Name') for library in user_libraries.get('Items', [])]}"
)
for library in user_libraries.get("Items", []):
library_title = library.get("Name")
library_type = library.get("CollectionType") library_type = library.get("CollectionType")
# If collection type is not set, fallback based on media files
if not library_type:
library_id = library.get("Id")
# Get first 100 items in library
library_items = self.query(
f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&Recursive=True&excludeItemTypes=Folder&limit=100",
"get",
)
if not library_items or not isinstance(library_items, dict):
logger.debug(
f"{self.server_type}: Failed to get library items for {user_name} {library_title}"
)
continue
all_types = set(
[x.get("Type") for x in library_items.get("Items", [])]
)
types = set([x for x in all_types if x in ["Movie", "Episode"]])
if not len(types) == 1:
logger.debug(
f"{self.server_type}: Skipping Library {library_title} didn't find just a single type, found {all_types}",
)
continue
library_type = types.pop()
library_type = (
"movies" if library_type == "Movie" else "tvshows"
)
if library_type not in ["movies", "tvshows"]: if library_type not in ["movies", "tvshows"]:
logger.debug( logger.debug(
f"{self.server_type}: Skipping Library {library_title} found type {library_type}", f"{self.server_type}: Skipping Library {library_title} found type {library_type}",
@ -302,12 +228,7 @@ class JellyfinEmby:
raise Exception(e) raise Exception(e)
def get_user_library_watched( def get_user_library_watched(
self, self, user_name, user_id, library_type, library_id, library_title
user_name: str,
user_id: str,
library_type: Literal["movies", "tvshows"],
library_id: str,
library_title: str,
) -> LibraryData: ) -> LibraryData:
user_name = user_name.lower() user_name = user_name.lower()
try: try:
@ -317,136 +238,89 @@ class JellyfinEmby:
watched = LibraryData(title=library_title) watched = LibraryData(title=library_title)
# Movies # Movies
if library_type == "movies": if library_type == "Movie":
movie_items = []
watched_items = self.query( watched_items = self.query(
f"/Users/{user_id}/Items" f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources,UserDataLastPlayedDate", + f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
"get", "get",
) ).get("Items", [])
if watched_items and isinstance(watched_items, dict):
movie_items += watched_items.get("Items", [])
in_progress_items = self.query( in_progress_items = self.query(
f"/Users/{user_id}/Items" f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources,UserDataLastPlayedDate", + f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
"get", "get",
) ).get("Items", [])
if in_progress_items and isinstance(in_progress_items, dict): for movie in watched_items + in_progress_items:
movie_items += in_progress_items.get("Items", [])
for movie in movie_items:
# Skip if theres no user data which means the movie has not been watched # Skip if theres no user data which means the movie has not been watched
if not movie.get("UserData"): if "UserData" not in movie:
continue continue
# Skip if theres no media tied to the movie # Skip if theres no media tied to the movie
if not movie.get("MediaSources"): if "MediaSources" not in movie or movie["MediaSources"] == {}:
continue continue
# Skip if not watched or watched less than a minute # Skip if not watched or watched less than a minute
if ( if (
movie["UserData"].get("Played") movie["UserData"]["Played"] == True
or movie["UserData"].get("PlaybackPositionTicks", 0) > 600000000 or movie["UserData"]["PlaybackPositionTicks"] > 600000000
): ):
watched.movies.append( watched.movies.append(get_mediaitem(self.server_type, movie))
get_mediaitem(
self.server_type,
movie,
self.generate_guids,
self.generate_locations,
)
)
# TV Shows # TV Shows
if library_type == "tvshows": if library_type in ["Series", "Episode"]:
# Retrieve a list of watched TV shows # Retrieve a list of watched TV shows
all_shows = self.query( watched_shows = self.query(
f"/Users/{user_id}/Items" f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount", + f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
"get", "get",
) ).get("Items", [])
if not all_shows or not isinstance(all_shows, dict):
logger.debug(
f"{self.server_type}: Failed to get shows for {user_name} in {library_title}"
)
return watched
# Filter the list of shows to only include those that have been partially or fully watched # Filter the list of shows to only include those that have been partially or fully watched
watched_shows_filtered = [] watched_shows_filtered = []
for show in all_shows.get("Items", []): for show in watched_shows:
if not show.get("UserData"): if "UserData" not in show:
continue continue
played_percentage = show["UserData"].get("PlayedPercentage") if "PlayedPercentage" in show["UserData"]:
if played_percentage is None: if show["UserData"]["PlayedPercentage"] > 0:
# Emby no longer shows PlayedPercentage
total_episodes = show.get("RecursiveItemCount")
unplayed_episodes = show["UserData"].get("UnplayedItemCount")
if total_episodes is None:
# Failed to get total count of episodes
continue
if (
unplayed_episodes is not None
and unplayed_episodes < total_episodes
):
watched_shows_filtered.append(show)
else:
if played_percentage > 0:
watched_shows_filtered.append(show) watched_shows_filtered.append(show)
# Retrieve the watched/partially watched list of episodes of each watched show # Retrieve the watched/partially watched list of episodes of each watched show
for show in watched_shows_filtered: for show in watched_shows_filtered:
show_name = show.get("Name") show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
show_guids = {
k.lower(): v for k, v in show.get("ProviderIds", {}).items()
}
show_locations = ( show_locations = (
tuple([filename_from_any_path(show["Path"])]) tuple([show["Path"].split("/")[-1]])
if show.get("Path") if "Path" in show
else tuple() else tuple()
) )
show_episodes = self.query( show_episodes = self.query(
f"/Shows/{show.get('Id')}/Episodes" f"/Shows/{show['Id']}/Episodes"
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources,UserDataLastPlayedDate", + f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources",
"get", "get",
) ).get("Items", [])
if not show_episodes or not isinstance(show_episodes, dict):
logger.debug(
f"{self.server_type}: Failed to get episodes for {user_name} {library_title} {show_name}"
)
continue
# Iterate through the episodes # Iterate through the episodes
# Create a list to store the episodes # Create a list to store the episodes
episode_mediaitem = [] episode_mediaitem = []
for episode in show_episodes.get("Items", []): for episode in show_episodes:
if not episode.get("UserData"): if "UserData" not in episode:
continue continue
if not episode.get("MediaSources"): if (
"MediaSources" not in episode
or episode["MediaSources"] == {}
):
continue continue
# If watched or watched more than a minute # If watched or watched more than a minute
if ( if (
episode["UserData"].get("Played") episode["UserData"]["Played"] == True
or episode["UserData"].get("PlaybackPositionTicks", 0) or episode["UserData"]["PlaybackPositionTicks"] > 600000000
> 600000000
): ):
episode_mediaitem.append( episode_mediaitem.append(
get_mediaitem( get_mediaitem(self.server_type, episode)
self.server_type,
episode,
self.generate_guids,
self.generate_locations,
)
) )
if episode_mediaitem: if episode_mediaitem:
@ -455,9 +329,9 @@ class JellyfinEmby:
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
title=show.get("Name"), title=show.get("Name"),
locations=show_locations, locations=show_locations,
imdb_id=show_guids.get("imdb"), imdb_id=show_guids.get("imdb", None),
tvdb_id=show_guids.get("tvdb"), tvdb_id=show_guids.get("tvdb", None),
tmdb_id=show_guids.get("tmdb"), tmdb_id=show_guids.get("tmdb", None),
), ),
episodes=episode_mediaitem, episodes=episode_mediaitem,
) )
@ -474,49 +348,55 @@ class JellyfinEmby:
) )
logger.error(traceback.format_exc()) logger.error(traceback.format_exc())
return LibraryData(title=library_title) return {}
def get_watched( def get_watched(
self, self, users: dict[str, str], sync_libraries: list[str]
users: dict[str, str],
sync_libraries: list[str],
users_watched: dict[str, UserData] = None,
) -> dict[str, UserData]: ) -> dict[str, UserData]:
try: try:
if not users_watched:
users_watched: dict[str, UserData] = {} users_watched: dict[str, UserData] = {}
for user_name, user_id in users.items(): for user_name, user_id in users.items():
if user_name.lower() not in users_watched: libraries = []
users_watched[user_name.lower()] = UserData()
all_libraries = self.query(f"/Users/{user_id}/Views", "get") all_libraries = self.query(f"/Users/{user_id}/Views", "get")
if not all_libraries or not isinstance(all_libraries, dict): for library in all_libraries["Items"]:
logger.debug( library_id = library["Id"]
f"{self.server_type}: Failed to get all libraries for {user_name}" library_title = library["Name"]
)
continue
for library in all_libraries.get("Items", []):
library_id = library.get("Id")
library_title = library.get("Name")
library_type = library.get("CollectionType")
if not library_id or not library_title or not library_type:
logger.debug(
f"{self.server_type}: Failed to get library data for {user_name} {library_title}"
)
continue
if library_title not in sync_libraries: if library_title not in sync_libraries:
continue continue
if library_title in users_watched: identifiers: dict[str, str] = {
logger.info( "library_id": library_id,
f"{self.server_type}: {user_name} {library_title} watched history has already been gathered, skipping" "library_title": library_title,
}
libraries.append(
self.query(
f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
"get",
identifiers=identifiers,
) )
)
for library in libraries:
if len(library["Items"]) == 0:
continue continue
library_id: str = library["Identifiers"]["library_id"]
library_title: str = library["Identifiers"]["library_title"]
# Get all library types excluding "Folder"
types = set(
[
x["Type"]
for x in library["Items"]
if x["Type"] in ["Movie", "Series", "Episode"]
]
)
for library_type in types:
# Get watched for user # Get watched for user
library_data = self.get_user_library_watched( library_data = self.get_user_library_watched(
user_name, user_name,
@ -536,7 +416,7 @@ class JellyfinEmby:
return users_watched return users_watched
except Exception as e: except Exception as e:
logger.error(f"{self.server_type}: Failed to get watched, Error: {e}") logger.error(f"{self.server_type}: Failed to get watched, Error: {e}")
return {} raise Exception(e)
def update_user_watched( def update_user_watched(
self, self,
@ -545,8 +425,9 @@ class JellyfinEmby:
library_data: LibraryData, library_data: LibraryData,
library_name: str, library_name: str,
library_id: str, library_id: str,
update_partial: bool,
dryrun: bool, dryrun: bool,
) -> None: ):
try: try:
# If there are no movies or shows to update, exit early. # If there are no movies or shows to update, exit early.
if not library_data.series and not library_data.movies: if not library_data.series and not library_data.movies:
@ -564,48 +445,22 @@ class JellyfinEmby:
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie", + "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
"get", "get",
) )
for jellyfin_video in jellyfin_search["Items"]:
if not jellyfin_search or not isinstance(jellyfin_search, dict):
logger.debug(
f"{self.server_type}: Failed to get movies for {user_name} {library_name}"
)
return
for jellyfin_video in jellyfin_search.get("Items", []):
jelly_identifiers = extract_identifiers_from_item( jelly_identifiers = extract_identifiers_from_item(
self.server_type, self.server_type, jellyfin_video
jellyfin_video,
self.generate_guids,
self.generate_locations,
) )
# Check each stored movie for a match. # Check each stored movie for a match.
for stored_movie in library_data.movies: for stored_movie in library_data.movies:
if check_same_identifiers( if check_same_identifiers(
jelly_identifiers, stored_movie.identifiers jelly_identifiers, stored_movie.identifiers
): ):
jellyfin_video_id = jellyfin_video.get("Id") jellyfin_video_id = jellyfin_video["Id"]
viewed_date: str = (
stored_movie.status.viewed_date.isoformat(
timespec="milliseconds"
).replace("+00:00", "Z")
)
if stored_movie.status.completed: if stored_movie.status.completed:
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library_name}" msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library_name}"
if not dryrun: if not dryrun:
user_data_payload: dict[
str, float | bool | datetime
] = {
"PlayCount": 1,
"Played": True,
"PlaybackPositionTicks": 0,
"LastPlayedDate": viewed_date,
}
self.query( self.query(
f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData", f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
"post", "post",
json=user_data_payload,
) )
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}") logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
@ -615,27 +470,19 @@ class JellyfinEmby:
user_name, user_name,
library_name, library_name,
jellyfin_video.get("Name"), jellyfin_video.get("Name"),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
elif self.update_partial: elif update_partial:
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}" msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}"
if not dryrun: if not dryrun:
user_data_payload: dict[ playback_position_payload = {
str, float | bool | datetime
] = {
"PlayCount": 0,
"Played": False,
"PlaybackPositionTicks": stored_movie.status.time "PlaybackPositionTicks": stored_movie.status.time
* 10_000, * 10_000,
"LastPlayedDate": viewed_date,
} }
self.query( self.query(
f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData", f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
"post", "post",
json=user_data_payload, json=playback_position_payload,
) )
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}") logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
@ -646,9 +493,6 @@ class JellyfinEmby:
library_name, library_name,
jellyfin_video.get("Name"), jellyfin_video.get("Name"),
duration=floor(stored_movie.status.time / 60_000), duration=floor(stored_movie.status.time / 60_000),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
else: else:
logger.trace( logger.trace(
@ -663,53 +507,33 @@ class JellyfinEmby:
+ "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series", + "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
"get", "get",
) )
if not jellyfin_search or not isinstance(jellyfin_search, dict): jellyfin_shows = [x for x in jellyfin_search["Items"]]
logger.debug(
f"{self.server_type}: Failed to get shows for {user_name} {library_name}"
)
return
jellyfin_shows = [x for x in jellyfin_search.get("Items", [])]
for jellyfin_show in jellyfin_shows: for jellyfin_show in jellyfin_shows:
jellyfin_show_identifiers = extract_identifiers_from_item( jellyfin_show_identifiers = extract_identifiers_from_item(
self.server_type, self.server_type, jellyfin_show
jellyfin_show,
self.generate_guids,
self.generate_locations,
) )
# Try to find a matching series in your stored library. # Try to find a matching series in your stored library.
for stored_series in library_data.series: for stored_series in library_data.series:
if check_same_identifiers( if check_same_identifiers(
jellyfin_show_identifiers, stored_series.identifiers jellyfin_show_identifiers, stored_series.identifiers
): ):
logger.trace( logger.info(
f"Found matching show for '{jellyfin_show.get('Name')}'", f"Found matching show for '{jellyfin_show.get('Name')}'",
) )
# Now update episodes. # Now update episodes.
# Get the list of Plex episodes for this show. # Get the list of Plex episodes for this show.
jellyfin_show_id = jellyfin_show.get("Id") jellyfin_show_id = jellyfin_show["Id"]
jellyfin_episodes = self.query( jellyfin_episodes = self.query(
f"/Shows/{jellyfin_show_id}/Episodes" f"/Shows/{jellyfin_show_id}/Episodes"
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources", + f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
"get", "get",
) )
if not jellyfin_episodes or not isinstance( for jellyfin_episode in jellyfin_episodes["Items"]:
jellyfin_episodes, dict
):
logger.debug(
f"{self.server_type}: Failed to get episodes for {user_name} {library_name} {jellyfin_show.get('Name')}"
)
return
for jellyfin_episode in jellyfin_episodes.get("Items", []):
jellyfin_episode_identifiers = ( jellyfin_episode_identifiers = (
extract_identifiers_from_item( extract_identifiers_from_item(
self.server_type, self.server_type, jellyfin_episode
jellyfin_episode,
self.generate_guids,
self.generate_locations,
) )
) )
for stored_ep in stored_series.episodes: for stored_ep in stored_series.episodes:
@ -717,32 +541,16 @@ class JellyfinEmby:
jellyfin_episode_identifiers, jellyfin_episode_identifiers,
stored_ep.identifiers, stored_ep.identifiers,
): ):
jellyfin_episode_id = jellyfin_episode.get("Id") jellyfin_episode_id = jellyfin_episode["Id"]
viewed_date: str = (
stored_ep.status.viewed_date.isoformat(
timespec="milliseconds"
).replace("+00:00", "Z")
)
if stored_ep.status.completed: if stored_ep.status.completed:
msg = ( msg = (
f"{self.server_type}: {jellyfin_episode.get('SeriesName')} {jellyfin_episode.get('SeasonName')} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}" f"{self.server_type}: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
+ f" as watched for {user_name} in {library_name}" + f" as watched for {user_name} in {library_name}"
) )
if not dryrun: if not dryrun:
user_data_payload: dict[
str, float | bool | datetime
] = {
"PlayCount": 1,
"Played": True,
"PlaybackPositionTicks": 0,
"LastPlayedDate": viewed_date,
}
self.query( self.query(
f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData", f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
"post", "post",
json=user_data_payload,
) )
logger.success( logger.success(
@ -755,30 +563,22 @@ class JellyfinEmby:
library_name, library_name,
jellyfin_episode.get("SeriesName"), jellyfin_episode.get("SeriesName"),
jellyfin_episode.get("Name"), jellyfin_episode.get("Name"),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
elif self.update_partial: elif update_partial:
msg = ( msg = (
f"{self.server_type}: {jellyfin_episode.get('SeriesName')} {jellyfin_episode.get('SeasonName')} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}" f"{self.server_type}: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
+ f" as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user_name} in {library_name}" + f" as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user_name} in {library_name}"
) )
if not dryrun: if not dryrun:
user_data_payload: dict[ playback_position_payload = {
str, float | bool | datetime
] = {
"PlayCount": 0,
"Played": False,
"PlaybackPositionTicks": stored_ep.status.time "PlaybackPositionTicks": stored_ep.status.time
* 10_000, * 10_000,
"LastPlayedDate": viewed_date,
} }
self.query( self.query(
f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData", f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
"post", "post",
json=user_data_payload, json=playback_position_payload,
) )
logger.success( logger.success(
@ -794,9 +594,6 @@ class JellyfinEmby:
duration=floor( duration=floor(
stored_ep.status.time / 60_000 stored_ep.status.time / 60_000
), ),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
else: else:
logger.trace( logger.trace(
@ -811,14 +608,25 @@ class JellyfinEmby:
logger.error( logger.error(
f"{self.server_type}: Error updating watched for {user_name} in library {library_name}, {e}", f"{self.server_type}: Error updating watched for {user_name} in library {library_name}, {e}",
) )
logger.error(traceback.format_exc())
raise Exception(e)
def update_watched( def update_watched(
self, self,
watched_list: dict[str, UserData], watched_list: dict[str, UserData],
user_mapping: dict[str, str] | None = None, user_mapping=None,
library_mapping: dict[str, str] | None = None, library_mapping=None,
dryrun: bool = False, dryrun=False,
) -> None: ):
try:
server_version = self.info(version_only=True)
update_partial = self.is_partial_update_supported(server_version)
if not update_partial:
logger.info(
f"{self.server_type}: Server version {server_version} does not support updating playback position.",
)
for user, user_data in watched_list.items(): for user, user_data in watched_list.items():
user_other = None user_other = None
user_name = None user_name = None
@ -839,7 +647,7 @@ class JellyfinEmby:
user_name = key user_name = key
break break
if not user_id or not user_name: if not user_id:
logger.info(f"{user} {user_other} not found in Jellyfin") logger.info(f"{user} {user_other} not found in Jellyfin")
continue continue
@ -847,14 +655,7 @@ class JellyfinEmby:
f"/Users/{user_id}/Views", f"/Users/{user_id}/Views",
"get", "get",
) )
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
if not jellyfin_libraries or not isinstance(jellyfin_libraries, dict):
logger.debug(
f"{self.server_type}: Failed to get libraries for {user_name}"
)
continue
jellyfin_libraries = [x for x in jellyfin_libraries.get("Items", [])]
for library_name in user_data.libraries: for library_name in user_data.libraries:
library_data = user_data.libraries[library_name] library_data = user_data.libraries[library_name]
@ -863,7 +664,9 @@ class JellyfinEmby:
if library_name in library_mapping.keys(): if library_name in library_mapping.keys():
library_other = library_mapping[library_name] library_other = library_mapping[library_name]
elif library_name in library_mapping.values(): elif library_name in library_mapping.values():
library_other = search_mapping(library_mapping, library_name) library_other = search_mapping(
library_mapping, library_name
)
if library_name.lower() not in [ if library_name.lower() not in [
x["Name"].lower() for x in jellyfin_libraries x["Name"].lower() for x in jellyfin_libraries
@ -889,21 +692,21 @@ class JellyfinEmby:
library_id = None library_id = None
for jellyfin_library in jellyfin_libraries: for jellyfin_library in jellyfin_libraries:
if jellyfin_library["Name"].lower() == library_name.lower(): if jellyfin_library["Name"] == library_name:
library_id = jellyfin_library["Id"] library_id = jellyfin_library["Id"]
continue continue
if library_id: if library_id:
try:
self.update_user_watched( self.update_user_watched(
user_name, user_name,
user_id, user_id,
library_data, library_data,
library_name, library_name,
library_id, library_id,
update_partial,
dryrun, dryrun,
) )
except Exception as e: except Exception as e:
logger.error( logger.error(f"{self.server_type}: Error updating watched, {e}")
f"{self.server_type}: Error updating watched for {user_name} in library {library_name}, {e}", raise Exception(e)
)

View File

@ -5,10 +5,6 @@ from src.functions import (
search_mapping, search_mapping,
) )
from src.emby import Emby
from src.jellyfin import Jellyfin
from src.plex import Plex
def check_skip_logic( def check_skip_logic(
library_title: str, library_title: str,
@ -58,7 +54,7 @@ def check_blacklist_logic(
blacklist_library: list[str], blacklist_library: list[str],
blacklist_library_type: list[str], blacklist_library_type: list[str],
library_other: str | None = None, library_other: str | None = None,
) -> str | None: ):
skip_reason = None skip_reason = None
if isinstance(library_type, (list, tuple, set)): if isinstance(library_type, (list, tuple, set)):
for library_type_item in library_type: for library_type_item in library_type:
@ -94,7 +90,7 @@ def check_whitelist_logic(
whitelist_library: list[str], whitelist_library: list[str],
whitelist_library_type: list[str], whitelist_library_type: list[str],
library_other: str | None = None, library_other: str | None = None,
) -> str | None: ):
skip_reason = None skip_reason = None
if len(whitelist_library_type) > 0: if len(whitelist_library_type) > 0:
if isinstance(library_type, (list, tuple, set)): if isinstance(library_type, (list, tuple, set)):
@ -165,8 +161,8 @@ def filter_libaries(
def setup_libraries( def setup_libraries(
server_1: Plex | Jellyfin | Emby, server_1,
server_2: Plex | Jellyfin | Emby, server_2,
blacklist_library: list[str], blacklist_library: list[str],
blacklist_library_type: list[str], blacklist_library_type: list[str],
whitelist_library: list[str], whitelist_library: list[str],

View File

@ -2,7 +2,7 @@ import os
import traceback import traceback
import json import json
import sys import sys
from dotenv import dotenv_values from dotenv import load_dotenv
from time import sleep, perf_counter from time import sleep, perf_counter
from loguru import logger from loguru import logger
@ -13,69 +13,61 @@ from src.library import setup_libraries
from src.functions import ( from src.functions import (
parse_string_to_list, parse_string_to_list,
str_to_bool, str_to_bool,
get_env_value,
) )
from src.users import setup_users from src.users import setup_users
from src.watched import ( from src.watched import (
cleanup_watched, cleanup_watched,
merge_server_watched,
) )
from src.black_white import setup_black_white_lists from src.black_white import setup_black_white_lists
from src.connection import generate_server_connections from src.connection import generate_server_connections
load_dotenv(override=True)
def configure_logger(log_file: str = "log.log", debug_level: str = "INFO") -> None: log_file = os.getenv("LOG_FILE", os.getenv("LOGFILE", "log.log"))
level = os.getenv("DEBUG_LEVEL", "INFO").upper()
def configure_logger():
# Remove default logger to configure our own # Remove default logger to configure our own
logger.remove() logger.remove()
# Choose log level based on environment # Choose log level based on environment
# If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO. # If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO.
if debug_level not in ["INFO", "DEBUG", "TRACE"]: if level not in ["INFO", "DEBUG", "TRACE"]:
logger.add(sys.stdout) logger.add(sys.stdout)
raise Exception( raise Exception("Invalid DEBUG_LEVEL, please choose between INFO, DEBUG, TRACE")
f"Invalid DEBUG_LEVEL {debug_level}, please choose between INFO, DEBUG, TRACE"
)
# Add a sink for file logging and the console. # Add a sink for file logging and the console.
logger.add(log_file, level=debug_level, mode="w") logger.add(log_file, level=level, mode="w")
logger.add(sys.stdout, level=debug_level) logger.add(sys.stdout, level=level)
def should_sync_server( def should_sync_server(
env,
server_1: Plex | Jellyfin | Emby, server_1: Plex | Jellyfin | Emby,
server_2: Plex | Jellyfin | Emby, server_2: Plex | Jellyfin | Emby,
) -> bool: ) -> bool:
sync_from_plex_to_jellyfin = str_to_bool( sync_from_plex_to_jellyfin = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_JELLYFIN", "True") os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True")
)
sync_from_plex_to_plex = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_PLEX", "True")
)
sync_from_plex_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_EMBY", "True")
) )
sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
sync_from_plex_to_emby = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_EMBY", "True"))
sync_from_jelly_to_plex = str_to_bool( sync_from_jelly_to_plex = str_to_bool(
get_env_value(env, "SYNC_FROM_JELLYFIN_TO_PLEX", "True") os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True")
) )
sync_from_jelly_to_jellyfin = str_to_bool( sync_from_jelly_to_jellyfin = str_to_bool(
get_env_value(env, "SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True") os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
) )
sync_from_jelly_to_emby = str_to_bool( sync_from_jelly_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_JELLYFIN_TO_EMBY", "True") os.getenv("SYNC_FROM_JELLYFIN_TO_EMBY", "True")
) )
sync_from_emby_to_plex = str_to_bool( sync_from_emby_to_plex = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_PLEX", "True"))
get_env_value(env, "SYNC_FROM_EMBY_TO_PLEX", "True")
)
sync_from_emby_to_jellyfin = str_to_bool( sync_from_emby_to_jellyfin = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_JELLYFIN", "True") os.getenv("SYNC_FROM_EMBY_TO_JELLYFIN", "True")
)
sync_from_emby_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_EMBY", "True")
) )
sync_from_emby_to_emby = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_EMBY", "True"))
if isinstance(server_1, Plex): if isinstance(server_1, Plex):
if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin: if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin:
@ -119,38 +111,32 @@ def should_sync_server(
return True return True
def main_loop(env) -> None: def main_loop():
dryrun = str_to_bool(get_env_value(env, "DRYRUN", "False")) dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
logger.info(f"Dryrun: {dryrun}") logger.info(f"Dryrun: {dryrun}")
user_mapping_env = get_env_value(env, "USER_MAPPING", None) user_mapping = os.getenv("USER_MAPPING", None)
user_mapping = None if user_mapping:
if user_mapping_env: user_mapping = json.loads(user_mapping.lower())
user_mapping = json.loads(user_mapping_env.lower())
logger.info(f"User Mapping: {user_mapping}") logger.info(f"User Mapping: {user_mapping}")
library_mapping_env = get_env_value(env, "LIBRARY_MAPPING", None) library_mapping = os.getenv("LIBRARY_MAPPING", None)
library_mapping = None if library_mapping:
if library_mapping_env: library_mapping = json.loads(library_mapping)
library_mapping = json.loads(library_mapping_env)
logger.info(f"Library Mapping: {library_mapping}") logger.info(f"Library Mapping: {library_mapping}")
# Create (black/white)lists # Create (black/white)lists
logger.info("Creating (black/white)lists") logger.info("Creating (black/white)lists")
blacklist_library = parse_string_to_list( blacklist_library = parse_string_to_list(os.getenv("BLACKLIST_LIBRARY", None))
get_env_value(env, "BLACKLIST_LIBRARY", None) whitelist_library = parse_string_to_list(os.getenv("WHITELIST_LIBRARY", None))
)
whitelist_library = parse_string_to_list(
get_env_value(env, "WHITELIST_LIBRARY", None)
)
blacklist_library_type = parse_string_to_list( blacklist_library_type = parse_string_to_list(
get_env_value(env, "BLACKLIST_LIBRARY_TYPE", None) os.getenv("BLACKLIST_LIBRARY_TYPE", None)
) )
whitelist_library_type = parse_string_to_list( whitelist_library_type = parse_string_to_list(
get_env_value(env, "WHITELIST_LIBRARY_TYPE", None) os.getenv("WHITELIST_LIBRARY_TYPE", None)
) )
blacklist_users = parse_string_to_list(get_env_value(env, "BLACKLIST_USERS", None)) blacklist_users = parse_string_to_list(os.getenv("BLACKLIST_USERS", None))
whitelist_users = parse_string_to_list(get_env_value(env, "WHITELIST_USERS", None)) whitelist_users = parse_string_to_list(os.getenv("WHITELIST_USERS", None))
( (
blacklist_library, blacklist_library,
@ -172,22 +158,19 @@ def main_loop(env) -> None:
# Create server connections # Create server connections
logger.info("Creating server connections") logger.info("Creating server connections")
servers = generate_server_connections(env) servers = generate_server_connections()
for server_1 in servers: for server_1 in servers:
# If server is the final server in the list, then we are done with the loop # If server is the final server in the list, then we are done with the loop
if server_1 == servers[-1]: if server_1 == servers[-1]:
break break
# Store a copy of server_1_watched that way it can be used multiple times without having to regather everyones watch history every single time
server_1_watched = None
# Start server_2 at the next server in the list # Start server_2 at the next server in the list
for server_2 in servers[servers.index(server_1) + 1 :]: for server_2 in servers[servers.index(server_1) + 1 :]:
# Check if server 1 and server 2 are going to be synced in either direction, skip if not # Check if server 1 and server 2 are going to be synced in either direction, skip if not
if not should_sync_server( if not should_sync_server(server_1, server_2) and not should_sync_server(
env, server_1, server_2 server_2, server_1
) and not should_sync_server(env, server_2, server_1): ):
continue continue
logger.info(f"Server 1: {type(server_1)}: {server_1.info()}") logger.info(f"Server 1: {type(server_1)}: {server_1.info()}")
@ -212,16 +195,14 @@ def main_loop(env) -> None:
logger.info(f"Server 2 syncing libraries: {server_2_libraries}") logger.info(f"Server 2 syncing libraries: {server_2_libraries}")
logger.info("Creating watched lists", 1) logger.info("Creating watched lists", 1)
server_1_watched = server_1.get_watched( server_1_watched = server_1.get_watched(server_1_users, server_1_libraries)
server_1_users, server_1_libraries, server_1_watched
)
logger.info("Finished creating watched list server 1") logger.info("Finished creating watched list server 1")
server_2_watched = server_2.get_watched(server_2_users, server_2_libraries) server_2_watched = server_2.get_watched(server_2_users, server_2_libraries)
logger.info("Finished creating watched list server 2") logger.info("Finished creating watched list server 2")
logger.trace(f"Server 1 watched: {server_1_watched}") logger.debug(f"Server 1 watched: {server_1_watched}")
logger.trace(f"Server 2 watched: {server_2_watched}") logger.debug(f"Server 2 watched: {server_2_watched}")
logger.info("Cleaning Server 1 Watched", 1) logger.info("Cleaning Server 1 Watched", 1)
server_1_watched_filtered = cleanup_watched( server_1_watched_filtered = cleanup_watched(
@ -240,18 +221,8 @@ def main_loop(env) -> None:
f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}", f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
) )
if should_sync_server(env, server_2, server_1): if should_sync_server(server_2, server_1):
logger.info(f"Syncing {server_2.info()} -> {server_1.info()}") logger.info(f"Syncing {server_2.info()} -> {server_1.info()}")
# Add server_2_watched_filtered to server_1_watched that way the stored version isn't stale for the next server
if not dryrun:
server_1_watched = merge_server_watched(
server_1_watched,
server_2_watched_filtered,
user_mapping,
library_mapping,
)
server_1.update_watched( server_1.update_watched(
server_2_watched_filtered, server_2_watched_filtered,
user_mapping, user_mapping,
@ -259,7 +230,7 @@ def main_loop(env) -> None:
dryrun, dryrun,
) )
if should_sync_server(env, server_1, server_2): if should_sync_server(server_1, server_2):
logger.info(f"Syncing {server_1.info()} -> {server_2.info()}") logger.info(f"Syncing {server_1.info()} -> {server_2.info()}")
server_2.update_watched( server_2.update_watched(
server_1_watched_filtered, server_1_watched_filtered,
@ -270,25 +241,16 @@ def main_loop(env) -> None:
@logger.catch @logger.catch
def main() -> None: def main():
# Get environment variables run_only_once = str_to_bool(os.getenv("RUN_ONLY_ONCE", "False"))
env_file = get_env_value(None, "ENV_FILE", ".env") sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
env = dotenv_values(env_file)
run_only_once = str_to_bool(get_env_value(env, "RUN_ONLY_ONCE", "False"))
sleep_duration = float(get_env_value(env, "SLEEP_DURATION", "3600"))
log_file = get_env_value(env, "LOG_FILE", "log.log")
debug_level = get_env_value(env, "DEBUG_LEVEL", "INFO")
if debug_level:
debug_level = debug_level.upper()
times: list[float] = [] times: list[float] = []
while True: while True:
try: try:
start = perf_counter() start = perf_counter()
# Reconfigure the logger on each loop so the logs are rotated on each run # Reconfigure the logger on each loop so the logs are rotated on each run
configure_logger(log_file, debug_level) configure_logger()
main_loop(env) main_loop()
end = perf_counter() end = perf_counter()
times.append(end - start) times.append(end - start)

View File

@ -1,5 +1,6 @@
from datetime import datetime, timezone import os
import requests import requests
from dotenv import load_dotenv
from loguru import logger from loguru import logger
from urllib3.poolmanager import PoolManager from urllib3.poolmanager import PoolManager
@ -9,15 +10,12 @@ from requests.adapters import HTTPAdapter as RequestsHTTPAdapter
from plexapi.video import Show, Episode, Movie from plexapi.video import Show, Episode, Movie
from plexapi.server import PlexServer from plexapi.server import PlexServer
from plexapi.myplex import MyPlexAccount, MyPlexUser from plexapi.myplex import MyPlexAccount
from plexapi.library import MovieSection, ShowSection
from src.functions import ( from src.functions import (
filename_from_any_path,
search_mapping, search_mapping,
log_marked, log_marked,
str_to_bool, str_to_bool,
get_env_value,
) )
from src.watched import ( from src.watched import (
LibraryData, LibraryData,
@ -29,12 +27,15 @@ from src.watched import (
check_same_identifiers, check_same_identifiers,
) )
load_dotenv(override=True)
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186 # Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
class HostNameIgnoringAdapter(RequestsHTTPAdapter): class HostNameIgnoringAdapter(RequestsHTTPAdapter):
def init_poolmanager( def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs):
self, connections: int, maxsize: int | None, block=..., **pool_kwargs
) -> None:
self.poolmanager = PoolManager( self.poolmanager = PoolManager(
num_pools=connections, num_pools=connections,
maxsize=maxsize, maxsize=maxsize,
@ -44,9 +45,7 @@ class HostNameIgnoringAdapter(RequestsHTTPAdapter):
) )
def extract_guids_from_item( def extract_guids_from_item(item: Movie | Show | Episode) -> dict[str, str]:
item: Movie | Show | Episode, generate_guids: bool
) -> dict[str, str]:
# If GENERATE_GUIDS is set to False, then return an empty dict # If GENERATE_GUIDS is set to False, then return an empty dict
if not generate_guids: if not generate_guids:
return {} return {}
@ -54,127 +53,209 @@ def extract_guids_from_item(
guids: dict[str, str] = dict( guids: dict[str, str] = dict(
guid.id.split("://") guid.id.split("://")
for guid in item.guids for guid in item.guids
if guid.id and len(guid.id.strip()) > 0 if guid.id is not None and len(guid.id.strip()) > 0
) )
return guids return guids
def extract_identifiers_from_item( def extract_identifiers_from_item(item: Movie | Show | Episode) -> MediaIdentifiers:
item: Movie | Show | Episode, guids = extract_guids_from_item(item)
generate_guids: bool,
generate_locations: bool,
) -> MediaIdentifiers:
guids = extract_guids_from_item(item, generate_guids)
locations = (
tuple([filename_from_any_path(loc) for loc in item.locations])
if generate_locations
else tuple()
)
if generate_guids:
if not guids:
logger.debug(
f"Plex: {item.title} has no guids{f', locations: {" ".join(item.locations)}' if generate_locations else ''}",
)
if generate_locations:
if not locations:
logger.debug(
f"Plex: {item.title} has no locations{f', guids: {guids}' if generate_guids else ''}",
)
return MediaIdentifiers( return MediaIdentifiers(
title=item.title, title=item.title,
locations=locations, locations=(
imdb_id=guids.get("imdb"), tuple([location.split("/")[-1] for location in item.locations])
tvdb_id=guids.get("tvdb"), if generate_locations
tmdb_id=guids.get("tmdb"), else tuple()
),
imdb_id=guids.get("imdb", None),
tvdb_id=guids.get("tvdb", None),
tmdb_id=guids.get("tmdb", None),
) )
def get_mediaitem( def get_mediaitem(item: Movie | Episode, completed=True) -> MediaItem:
item: Movie | Episode,
completed: bool,
generate_guids: bool = True,
generate_locations: bool = True,
) -> MediaItem:
last_viewed_at = item.lastViewedAt
viewed_date = datetime.today()
if last_viewed_at:
viewed_date = last_viewed_at.replace(tzinfo=timezone.utc)
return MediaItem( return MediaItem(
identifiers=extract_identifiers_from_item( identifiers=extract_identifiers_from_item(item),
item, generate_guids, generate_locations status=WatchedStatus(completed=completed, time=item.viewOffset),
),
status=WatchedStatus(
completed=completed, time=item.viewOffset, viewed_date=viewed_date
),
) )
def update_user_watched(
    user: MyPlexAccount,
    user_plex: PlexServer,
    library_data: LibraryData,
    library_name: str,
    dryrun: bool,
):
    """Push stored watch state onto a single Plex library for one user.

    Walks the Plex library section named ``library_name`` on ``user_plex``
    (a server connection already authenticated as ``user``) and, for every
    currently-unwatched Plex item that matches an entry in ``library_data``
    (matching via ``check_same_identifiers`` on guids/locations), either
    marks it watched (``markWatched``) or sets partial progress
    (``updateTimeline``). When ``dryrun`` is True nothing is written; the
    action is only logged. Every applied (or would-be) change is also
    recorded through ``log_marked``.

    Raises: re-raises any exception after logging it, so the caller decides
    whether the sync run continues.
    """
    try:
        # If there are no movies or shows to update, exit early.
        if not library_data.series and not library_data.movies:
            return

        logger.info(
            f"Plex: Updating watched for {user.title} in library {library_name}"
        )
        library_section = user_plex.library.section(library_name)

        # Update movies.
        if library_data.movies:
            # Search for Plex movies that are currently marked as unwatched.
            # Only unwatched items are candidates: already-watched ones need
            # no update, which keeps the matching loop small.
            for plex_movie in library_section.search(unwatched=True):
                plex_identifiers = extract_identifiers_from_item(plex_movie)
                # Check each stored movie for a match.
                for stored_movie in library_data.movies:
                    if check_same_identifiers(
                        plex_identifiers, stored_movie.identifiers
                    ):
                        # If the stored movie is marked as watched (or has enough progress),
                        # update the Plex movie accordingly.
                        if stored_movie.status.completed:
                            msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
                            if not dryrun:
                                plex_movie.markWatched()

                            logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                            log_marked(
                                "Plex",
                                user_plex.friendlyName,
                                user.title,
                                library_name,
                                plex_movie.title,
                                None,
                                None,
                            )
                        else:
                            # status.time is in milliseconds; floor(/60_000)
                            # converts to whole minutes for the log message.
                            msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
                            if not dryrun:
                                plex_movie.updateTimeline(stored_movie.status.time)

                            logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                            log_marked(
                                "Plex",
                                user_plex.friendlyName,
                                user.title,
                                library_name,
                                plex_movie.title,
                                duration=stored_movie.status.time,
                            )
                        # Once matched, no need to check further.
                        break

        # Update TV Shows (series/episodes).
        if library_data.series:
            # For each Plex show in the library section:
            plex_shows = library_section.search(unwatched=True)
            for plex_show in plex_shows:
                # Extract identifiers from the Plex show.
                plex_show_identifiers = extract_identifiers_from_item(plex_show)
                # Try to find a matching series in your stored library.
                for stored_series in library_data.series:
                    if check_same_identifiers(
                        plex_show_identifiers, stored_series.identifiers
                    ):
                        logger.info(f"Found matching show for '{plex_show.title}'")
                        # Now update episodes.
                        # Get the list of Plex episodes for this show.
                        plex_episodes = plex_show.episodes()
                        for plex_episode in plex_episodes:
                            plex_episode_identifiers = extract_identifiers_from_item(
                                plex_episode
                            )
                            for stored_ep in stored_series.episodes:
                                if check_same_identifiers(
                                    plex_episode_identifiers, stored_ep.identifiers
                                ):
                                    if stored_ep.status.completed:
                                        msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
                                        if not dryrun:
                                            plex_episode.markWatched()

                                        logger.success(
                                            f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                        )
                                        log_marked(
                                            "Plex",
                                            user_plex.friendlyName,
                                            user.title,
                                            library_name,
                                            plex_show.title,
                                            plex_episode.title,
                                        )
                                    else:
                                        msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
                                        if not dryrun:
                                            plex_episode.updateTimeline(
                                                stored_ep.status.time
                                            )

                                        logger.success(
                                            f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                        )
                                        log_marked(
                                            "Plex",
                                            user_plex.friendlyName,
                                            user.title,
                                            library_name,
                                            plex_show.title,
                                            plex_episode.title,
                                            stored_ep.status.time,
                                        )
                                    break  # Found a matching episode.
                        break  # Found a matching show.
    except Exception as e:
        # NOTE(review): the second positional argument `2` passed to
        # logger.error looks like a leftover indent-level argument from a
        # custom logger — loguru treats extra positionals as format args;
        # confirm intent.
        logger.error(
            f"Plex: Failed to update watched for {user.title} in library {library_name}, Error: {e}",
            2,
        )
        raise e
# class plex accept base url and token and username and password but default with none # class plex accept base url and token and username and password but default with none
class Plex: class Plex:
def __init__( def __init__(
self, self,
env, baseurl=None,
base_url: str | None = None, token=None,
token: str | None = None, username=None,
user_name: str | None = None, password=None,
password: str | None = None, servername=None,
server_name: str | None = None, ssl_bypass=False,
ssl_bypass: bool = False, session=None,
session: requests.Session | None = None, ):
) -> None: self.server_type = "Plex"
self.env = env self.baseurl = baseurl
self.token = token
self.server_type: str = "Plex" self.username = username
self.ssl_bypass: bool = ssl_bypass self.password = password
self.servername = servername
self.ssl_bypass = ssl_bypass
if ssl_bypass: if ssl_bypass:
# Session for ssl bypass # Session for ssl bypass
session = requests.Session() session = requests.Session()
# By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186 # By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
session.mount("https://", HostNameIgnoringAdapter()) session.mount("https://", HostNameIgnoringAdapter())
self.session = session self.session = session
self.plex: PlexServer = self.login( self.plex = self.login(self.baseurl, self.token)
base_url, token, user_name, password, server_name self.admin_user = self.plex.myPlexAccount()
) self.users = self.get_users()
self.base_url: str = self.plex._baseurl def login(self, baseurl, token):
self.admin_user: MyPlexAccount = self.plex.myPlexAccount()
self.users: list[MyPlexUser | MyPlexAccount] = self.get_users()
self.generate_guids: bool = str_to_bool(
get_env_value(self.env, "GENERATE_GUIDS", "True")
)
self.generate_locations: bool = str_to_bool(
get_env_value(self.env, "GENERATE_LOCATIONS", "True")
)
def login(
self,
base_url: str | None,
token: str | None,
user_name: str | None,
password: str | None,
server_name: str | None,
) -> PlexServer:
try: try:
if base_url and token: if baseurl and token:
plex: PlexServer = PlexServer(base_url, token, session=self.session) plex = PlexServer(baseurl, token, session=self.session)
elif user_name and password and server_name: elif self.username and self.password and self.servername:
# Login via plex account # Login via plex account
account = MyPlexAccount(user_name, password) account = MyPlexAccount(self.username, self.password)
plex = account.resource(server_name).connect() plex = account.resource(self.servername).connect()
else: else:
raise Exception("No complete plex credentials provided") raise Exception("No complete plex credentials provided")
return plex return plex
except Exception as e: except Exception as e:
if user_name: if self.username:
msg = f"Failed to login via plex account {user_name}" msg = f"Failed to login via plex account {self.username}"
logger.error(f"Plex: Failed to login, {msg}, Error: {e}") logger.error(f"Plex: Failed to login, {msg}, Error: {e}")
else: else:
logger.error(f"Plex: Failed to login, Error: {e}") logger.error(f"Plex: Failed to login, Error: {e}")
@ -183,9 +264,9 @@ class Plex:
def info(self) -> str: def info(self) -> str:
return f"Plex {self.plex.friendlyName}: {self.plex.version}" return f"Plex {self.plex.friendlyName}: {self.plex.version}"
def get_users(self) -> list[MyPlexUser | MyPlexAccount]: def get_users(self):
try: try:
users: list[MyPlexUser | MyPlexAccount] = self.plex.myPlexAccount().users() users = self.plex.myPlexAccount().users()
# append self to users # append self to users
users.append(self.plex.myPlexAccount()) users.append(self.plex.myPlexAccount())
@ -200,9 +281,7 @@ class Plex:
output = {} output = {}
libraries = self.plex.library.sections() libraries = self.plex.library.sections()
logger.debug( logger.debug(f"Plex: All Libraries {[library.title for library in libraries]}")
f"Plex: All Libraries {[library.title for library in libraries]}"
)
for library in libraries: for library in libraries:
library_title = library.title library_title = library.title
@ -221,9 +300,8 @@ class Plex:
logger.error(f"Plex: Failed to get libraries, Error: {e}") logger.error(f"Plex: Failed to get libraries, Error: {e}")
raise Exception(e) raise Exception(e)
def get_user_library_watched( def get_user_library_watched(self, user, user_plex, library) -> LibraryData:
self, user_name: str, user_plex: PlexServer, library: MovieSection | ShowSection user_name: str = user.username.lower() if user.username else user.title.lower()
) -> LibraryData:
try: try:
logger.info( logger.info(
f"Plex: Generating watched for {user_name} in library {library.title}", f"Plex: Generating watched for {user_name} in library {library.title}",
@ -237,14 +315,7 @@ class Plex:
unwatched=False unwatched=False
) + library_videos.search(inProgress=True): ) + library_videos.search(inProgress=True):
if video.isWatched or video.viewOffset >= 60000: if video.isWatched or video.viewOffset >= 60000:
watched.movies.append( watched.movies.append(get_mediaitem(video, video.isWatched))
get_mediaitem(
video,
video.isWatched,
self.generate_guids,
self.generate_locations,
)
)
elif library.type == "show": elif library.type == "show":
# Keep track of processed shows to reduce duplicate shows # Keep track of processed shows to reduce duplicate shows
@ -255,7 +326,7 @@ class Plex:
if show.key in processed_shows: if show.key in processed_shows:
continue continue
processed_shows.append(show.key) processed_shows.append(show.key)
show_guids = extract_guids_from_item(show, self.generate_guids) show_guids = extract_guids_from_item(show)
episode_mediaitem = [] episode_mediaitem = []
# Fetch watched or partially watched episodes # Fetch watched or partially watched episodes
@ -263,12 +334,7 @@ class Plex:
viewOffset__gte=60_000 viewOffset__gte=60_000
): ):
episode_mediaitem.append( episode_mediaitem.append(
get_mediaitem( get_mediaitem(episode, episode.isWatched)
episode,
episode.isWatched,
self.generate_guids,
self.generate_locations,
)
) )
if episode_mediaitem: if episode_mediaitem:
@ -279,16 +345,16 @@ class Plex:
locations=( locations=(
tuple( tuple(
[ [
filename_from_any_path(location) location.split("/")[-1]
for location in show.locations for location in show.locations
] ]
) )
if self.generate_locations if generate_locations
else tuple() else tuple()
), ),
imdb_id=show_guids.get("imdb"), imdb_id=show_guids.get("imdb", None),
tvdb_id=show_guids.get("tvdb"), tvdb_id=show_guids.get("tvdb", None),
tmdb_id=show_guids.get("tmdb"), tmdb_id=show_guids.get("tmdb", None),
), ),
episodes=episode_mediaitem, episodes=episode_mediaitem,
) )
@ -302,14 +368,8 @@ class Plex:
) )
return LibraryData(title=library.title) return LibraryData(title=library.title)
def get_watched( def get_watched(self, users, sync_libraries) -> dict[str, UserData]:
self,
users: list[MyPlexUser | MyPlexAccount],
sync_libraries: list[str],
users_watched: dict[str, UserData] = None,
) -> dict[str, UserData]:
try: try:
if not users_watched:
users_watched: dict[str, UserData] = {} users_watched: dict[str, UserData] = {}
for user in users: for user in users:
@ -318,220 +378,46 @@ class Plex:
else: else:
token = user.get_token(self.plex.machineIdentifier) token = user.get_token(self.plex.machineIdentifier)
if token: if token:
user_plex = self.login(self.base_url, token, None, None, None) user_plex = self.login(
self.plex._baseurl,
token,
)
else: else:
logger.error( logger.error(
f"Plex: Failed to get token for {user.title}, skipping", f"Plex: Failed to get token for {user.title}, skipping",
) )
continue continue
user_name: str = (
user.username.lower() if user.username else user.title.lower()
)
libraries = user_plex.library.sections() libraries = user_plex.library.sections()
for library in libraries: for library in libraries:
if library.title not in sync_libraries: if library.title not in sync_libraries:
continue continue
if user_name not in users_watched:
users_watched[user_name] = UserData()
if library.title in users_watched[user_name].libraries:
logger.info(
f"Plex: {user_name} {library.title} watched history has already been gathered, skipping"
)
continue
library_data = self.get_user_library_watched( library_data = self.get_user_library_watched(
user_name, user_plex, library user, user_plex, library
) )
users_watched[user_name].libraries[library.title] = library_data if user.title.lower() not in users_watched:
users_watched[user.title.lower()] = UserData()
users_watched[user.title.lower()].libraries[library.title] = (
library_data
)
return users_watched return users_watched
except Exception as e: except Exception as e:
logger.error(f"Plex: Failed to get users watched, Error: {e}") logger.error(f"Plex: Failed to get watched, Error: {e}")
return {} raise Exception(e)
def update_user_watched(
    self,
    user: MyPlexAccount,
    user_plex: PlexServer,
    library_data: LibraryData,
    library_name: str,
    dryrun: bool,
) -> None:
    """Push stored watch state onto one Plex library for one user.

    For every currently-unwatched item in the Plex section named
    ``library_name`` (on ``user_plex``, already authenticated as ``user``)
    that matches an entry in ``library_data`` — matched via
    ``check_same_identifiers`` using guids/locations gated by
    ``self.generate_guids`` / ``self.generate_locations`` — either marks it
    watched (``markWatched``) or sets partial progress (``updateTimeline``).
    With ``dryrun`` True nothing is written; actions are only logged.
    Each change is also appended to the mark file resolved from the
    ``MARK_FILE`` env value via ``log_marked``.

    Per-item write failures are logged and skipped (``continue``), so one
    bad item does not abort the rest of the library.
    """
    # If there are no movies or shows to update, exit early.
    if not library_data.series and not library_data.movies:
        return

    logger.info(
        f"Plex: Updating watched for {user.title} in library {library_name}"
    )

    library_section = user_plex.library.section(library_name)
    if not library_section:
        logger.error(
            f"Plex: Library {library_name} not found for {user.title}, skipping",
        )
        return

    # Update movies.
    if library_data.movies:
        # Search for Plex movies that are currently marked as unwatched.
        # Only unwatched items are candidates: already-watched ones need
        # no update, which keeps the matching loop small.
        for plex_movie in library_section.search(unwatched=True):
            plex_identifiers = extract_identifiers_from_item(
                plex_movie, self.generate_guids, self.generate_locations
            )
            # Check each stored movie for a match.
            for stored_movie in library_data.movies:
                if check_same_identifiers(
                    plex_identifiers, stored_movie.identifiers
                ):
                    # If the stored movie is marked as watched (or has enough progress),
                    # update the Plex movie accordingly.
                    if stored_movie.status.completed:
                        msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
                        if not dryrun:
                            try:
                                plex_movie.markWatched()
                            except Exception as e:
                                # Skip this movie on failure; keep syncing the rest.
                                logger.error(
                                    f"Plex: Failed to mark {plex_movie.title} as watched, Error: {e}"
                                )
                                continue

                        logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                        log_marked(
                            "Plex",
                            user_plex.friendlyName,
                            user.title,
                            library_name,
                            plex_movie.title,
                            None,
                            None,
                            mark_file=get_env_value(
                                self.env, "MARK_FILE", "mark.log"
                            ),
                        )
                    else:
                        # status.time is in milliseconds; floor(/60_000)
                        # converts to whole minutes for the log message.
                        msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
                        if not dryrun:
                            try:
                                plex_movie.updateTimeline(stored_movie.status.time)
                            except Exception as e:
                                # Skip this movie on failure; keep syncing the rest.
                                logger.error(
                                    f"Plex: Failed to update {plex_movie.title} timeline, Error: {e}"
                                )
                                continue

                        logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                        log_marked(
                            "Plex",
                            user_plex.friendlyName,
                            user.title,
                            library_name,
                            plex_movie.title,
                            duration=stored_movie.status.time,
                            mark_file=get_env_value(
                                self.env, "MARK_FILE", "mark.log"
                            ),
                        )
                    # Once matched, no need to check further.
                    break

    # Update TV Shows (series/episodes).
    if library_data.series:
        # For each Plex show in the library section:
        plex_shows = library_section.search(unwatched=True)
        for plex_show in plex_shows:
            # Extract identifiers from the Plex show.
            plex_show_identifiers = extract_identifiers_from_item(
                plex_show, self.generate_guids, self.generate_locations
            )
            # Try to find a matching series in your stored library.
            for stored_series in library_data.series:
                if check_same_identifiers(
                    plex_show_identifiers, stored_series.identifiers
                ):
                    logger.trace(f"Found matching show for '{plex_show.title}'")
                    # Now update episodes.
                    # Get the list of Plex episodes for this show.
                    plex_episodes = plex_show.episodes()
                    for plex_episode in plex_episodes:
                        plex_episode_identifiers = extract_identifiers_from_item(
                            plex_episode,
                            self.generate_guids,
                            self.generate_locations,
                        )
                        for stored_ep in stored_series.episodes:
                            if check_same_identifiers(
                                plex_episode_identifiers, stored_ep.identifiers
                            ):
                                if stored_ep.status.completed:
                                    msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
                                    if not dryrun:
                                        try:
                                            plex_episode.markWatched()
                                        except Exception as e:
                                            # Skip this episode on failure; keep syncing the rest.
                                            logger.error(
                                                f"Plex: Failed to mark {plex_show.title} {plex_episode.title} as watched, Error: {e}"
                                            )
                                            continue

                                    logger.success(
                                        f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                    )
                                    log_marked(
                                        "Plex",
                                        user_plex.friendlyName,
                                        user.title,
                                        library_name,
                                        plex_show.title,
                                        plex_episode.title,
                                        mark_file=get_env_value(
                                            self.env, "MARK_FILE", "mark.log"
                                        ),
                                    )
                                else:
                                    # status.time is in milliseconds; floor(/60_000)
                                    # converts to whole minutes for the log message.
                                    msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
                                    if not dryrun:
                                        try:
                                            plex_episode.updateTimeline(
                                                stored_ep.status.time
                                            )
                                        except Exception as e:
                                            # Skip this episode on failure; keep syncing the rest.
                                            logger.error(
                                                f"Plex: Failed to update {plex_show.title} {plex_episode.title} timeline, Error: {e}"
                                            )
                                            continue

                                    logger.success(
                                        f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                    )
                                    log_marked(
                                        "Plex",
                                        user_plex.friendlyName,
                                        user.title,
                                        library_name,
                                        plex_show.title,
                                        plex_episode.title,
                                        stored_ep.status.time,
                                        mark_file=get_env_value(
                                            self.env, "MARK_FILE", "mark.log"
                                        ),
                                    )
                                break  # Found a matching episode.
                    break  # Found a matching show.
def update_watched( def update_watched(
self, self,
watched_list: dict[str, UserData], watched_list: dict[str, UserData],
user_mapping: dict[str, str] | None = None, user_mapping=None,
library_mapping: dict[str, str] | None = None, library_mapping=None,
dryrun: bool = False, dryrun=False,
) -> None: ):
try:
for user, user_data in watched_list.items(): for user, user_data in watched_list.items():
user_other = None user_other = None
# If type of user is dict # If type of user is dict
@ -540,7 +426,9 @@ class Plex:
for index, value in enumerate(self.users): for index, value in enumerate(self.users):
username_title = ( username_title = (
value.username.lower() if value.username else value.title.lower() value.username.lower()
if value.username
else value.title.lower()
) )
if user.lower() == username_title: if user.lower() == username_title:
@ -554,19 +442,15 @@ class Plex:
user_plex = self.plex user_plex = self.plex
else: else:
if isinstance(user, str): if isinstance(user, str):
logger.debug( logger.warning(
f"Plex: {user} is not a plex object, attempting to get object for user", f"Plex: {user} is not a plex object, attempting to get object for user",
) )
user = self.plex.myPlexAccount().user(user) user = self.plex.myPlexAccount().user(user)
if not isinstance(user, MyPlexUser):
logger.error(f"Plex: {user} failed to get PlexUser")
continue
token = user.get_token(self.plex.machineIdentifier) token = user.get_token(self.plex.machineIdentifier)
if token: if token:
user_plex = PlexServer( user_plex = PlexServer(
self.base_url, self.plex._baseurl,
token, token,
session=self.session, session=self.session,
) )
@ -576,10 +460,6 @@ class Plex:
) )
continue continue
if not user_plex:
logger.error(f"Plex: {user} Failed to get PlexServer")
continue
for library_name in user_data.libraries: for library_name in user_data.libraries:
library_data = user_data.libraries[library_name] library_data = user_data.libraries[library_name]
library_other = None library_other = None
@ -587,7 +467,9 @@ class Plex:
library_other = search_mapping(library_mapping, library_name) library_other = search_mapping(library_mapping, library_name)
# if library in plex library list # if library in plex library list
library_list = user_plex.library.sections() library_list = user_plex.library.sections()
if library_name.lower() not in [x.title.lower() for x in library_list]: if library_name.lower() not in [
x.title.lower() for x in library_list
]:
if library_other: if library_other:
if library_other.lower() in [ if library_other.lower() in [
x.title.lower() for x in library_list x.title.lower() for x in library_list
@ -607,16 +489,14 @@ class Plex:
) )
continue continue
try: update_user_watched(
self.update_user_watched(
user, user,
user_plex, user_plex,
library_data, library_data,
library_name, library_name,
dryrun, dryrun,
) )
except Exception as e: except Exception as e:
logger.error( logger.error(f"Plex: Failed to update watched, Error: {e}")
f"Plex: Failed to update watched for {user.title} in {library_name}, Error: {e}", raise Exception(e)
)
continue

View File

@ -1,4 +1,4 @@
from plexapi.myplex import MyPlexAccount, MyPlexUser from plexapi.myplex import MyPlexAccount
from loguru import logger from loguru import logger
from src.emby import Emby from src.emby import Emby
@ -109,10 +109,7 @@ def setup_users(
blacklist_users: list[str], blacklist_users: list[str],
whitelist_users: list[str], whitelist_users: list[str],
user_mapping: dict[str, str] | None = None, user_mapping: dict[str, str] | None = None,
) -> tuple[ ) -> tuple[list[MyPlexAccount] | dict[str, str], list[MyPlexAccount] | dict[str, str]]:
list[MyPlexAccount | MyPlexUser] | dict[str, str],
list[MyPlexAccount | MyPlexUser] | dict[str, str],
]:
server_1_users = generate_user_list(server_1) server_1_users = generate_user_list(server_1)
server_2_users = generate_user_list(server_2) server_2_users = generate_user_list(server_2)
logger.debug(f"Server 1 users: {server_1_users}") logger.debug(f"Server 1 users: {server_1_users}")

View File

@ -1,14 +1,12 @@
import copy import copy
from datetime import datetime from pydantic import BaseModel
from pydantic import BaseModel, Field
from loguru import logger from loguru import logger
from typing import Any
from src.functions import search_mapping from src.functions import search_mapping
class MediaIdentifiers(BaseModel): class MediaIdentifiers(BaseModel):
title: str | None = None title: str
# File information, will be folder for series and media file for episode/movie # File information, will be folder for series and media file for episode/movie
locations: tuple[str, ...] = tuple() locations: tuple[str, ...] = tuple()
@ -22,7 +20,6 @@ class MediaIdentifiers(BaseModel):
class WatchedStatus(BaseModel): class WatchedStatus(BaseModel):
completed: bool completed: bool
time: int time: int
viewed_date: datetime
class MediaItem(BaseModel): class MediaItem(BaseModel):
@ -32,122 +29,17 @@ class MediaItem(BaseModel):
class Series(BaseModel): class Series(BaseModel):
identifiers: MediaIdentifiers identifiers: MediaIdentifiers
episodes: list[MediaItem] = Field(default_factory=list) episodes: list[MediaItem] = []
class LibraryData(BaseModel): class LibraryData(BaseModel):
title: str title: str
movies: list[MediaItem] = Field(default_factory=list) movies: list[MediaItem] = []
series: list[Series] = Field(default_factory=list) series: list[Series] = []
class UserData(BaseModel): class UserData(BaseModel):
libraries: dict[str, LibraryData] = Field(default_factory=dict) libraries: dict[str, LibraryData] = {}
def merge_mediaitem_data(ep1: MediaItem, ep2: MediaItem) -> MediaItem:
"""
Merge two MediaItem episodes by comparing their watched status.
If one is completed while the other isn't, choose the completed one.
If both are completed or both are not, choose the one with the higher time.
"""
if ep1.status.completed != ep2.status.completed:
return ep1 if ep1.status.completed else ep2
return ep1 if ep1.status.time >= ep2.status.time else ep2
def merge_series_data(series1: Series, series2: Series) -> Series:
"""
Merge two Series objects by combining their episodes.
For duplicate episodes (determined by check_same_identifiers), merge their watched status.
"""
merged_series = copy.deepcopy(series1)
for ep in series2.episodes:
for idx, merged_ep in enumerate(merged_series.episodes):
if check_same_identifiers(ep.identifiers, merged_ep.identifiers):
merged_series.episodes[idx] = merge_mediaitem_data(merged_ep, ep)
break
else:
merged_series.episodes.append(copy.deepcopy(ep))
return merged_series
def merge_library_data(lib1: LibraryData, lib2: LibraryData) -> LibraryData:
"""
Merge two LibraryData objects by extending movies and merging series.
For series, duplicates are determined using check_same_identifiers.
"""
merged = copy.deepcopy(lib1)
# Merge movies.
for movie in lib2.movies:
for idx, merged_movie in enumerate(merged.movies):
if check_same_identifiers(movie.identifiers, merged_movie.identifiers):
merged.movies[idx] = merge_mediaitem_data(merged_movie, movie)
break
else:
merged.movies.append(copy.deepcopy(movie))
# Merge series.
for series2 in lib2.series:
for idx, series1 in enumerate(merged.series):
if check_same_identifiers(series1.identifiers, series2.identifiers):
merged.series[idx] = merge_series_data(series1, series2)
break
else:
merged.series.append(copy.deepcopy(series2))
return merged
def merge_user_data(user1: UserData, user2: UserData) -> UserData:
"""
Merge two UserData objects by merging their libraries.
If a library exists in both, merge its content; otherwise, add the new library.
"""
merged_libraries = copy.deepcopy(user1.libraries)
for lib_key, lib_data in user2.libraries.items():
if lib_key in merged_libraries:
merged_libraries[lib_key] = merge_library_data(
merged_libraries[lib_key], lib_data
)
else:
merged_libraries[lib_key] = copy.deepcopy(lib_data)
return UserData(libraries=merged_libraries)
def merge_server_watched(
watched_list_1: dict[str, UserData],
watched_list_2: dict[str, UserData],
user_mapping: dict[str, str] | None = None,
library_mapping: dict[str, str] | None = None,
) -> dict[str, UserData]:
"""
Merge two dictionaries of UserData while taking into account possible
differences in user and library keys via the provided mappings.
"""
merged_watched = copy.deepcopy(watched_list_1)
for user_2, user_data in watched_list_2.items():
# Determine matching user key.
user_key = user_mapping.get(user_2, user_2) if user_mapping else user_2
if user_key not in merged_watched:
merged_watched[user_2] = copy.deepcopy(user_data)
continue
for lib_key, lib_data in user_data.libraries.items():
mapped_lib_key = (
library_mapping.get(lib_key, lib_key) if library_mapping else lib_key
)
if mapped_lib_key not in merged_watched[user_key].libraries:
merged_watched[user_key].libraries[lib_key] = copy.deepcopy(lib_data)
else:
merged_watched[user_key].libraries[mapped_lib_key] = merge_library_data(
merged_watched[user_key].libraries[mapped_lib_key],
lib_data,
)
return merged_watched
def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool: def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool:
@ -211,8 +103,8 @@ def check_remove_entry(item1: MediaItem, item2: MediaItem) -> bool:
def cleanup_watched( def cleanup_watched(
watched_list_1: dict[str, UserData], watched_list_1: dict[str, UserData],
watched_list_2: dict[str, UserData], watched_list_2: dict[str, UserData],
user_mapping: dict[str, str] | None = None, user_mapping=None,
library_mapping: dict[str, str] | None = None, library_mapping=None,
) -> dict[str, UserData]: ) -> dict[str, UserData]:
modified_watched_list_1 = copy.deepcopy(watched_list_1) modified_watched_list_1 = copy.deepcopy(watched_list_1)
@ -307,17 +199,11 @@ def cleanup_watched(
return modified_watched_list_1 return modified_watched_list_1
def get_other( def get_other(watched_list, object_1, object_2):
watched_list: dict[str, Any], object_1: str, object_2: str | None
) -> str | None:
if object_1 in watched_list: if object_1 in watched_list:
return object_1 return object_1
elif object_2 in watched_list:
if object_2 and object_2 in watched_list:
return object_2 return object_2
else:
logger.info( logger.info(f"{object_1} and {object_2} not found in watched list 2")
f"{object_1}{' and ' + object_2 if object_2 else ''} not found in watched list 2"
)
return None return None

View File

@ -1,4 +1,3 @@
from datetime import datetime
import sys import sys
import os import os
@ -24,8 +23,6 @@ from src.watched import (
cleanup_watched, cleanup_watched,
) )
viewed_date = datetime.today()
tv_shows_watched_list_1: list[Series] = [ tv_shows_watched_list_1: list[Series] = [
Series( Series(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -44,7 +41,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="968589", tmdb_id="968589",
tvdb_id="295296", tvdb_id="295296",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -54,9 +51,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="968590", tmdb_id="968590",
tvdb_id="295297", tvdb_id="295297",
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=240000),
completed=False, time=240000, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -66,7 +61,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="968592", tmdb_id="968592",
tvdb_id="295298", tvdb_id="295298",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
], ],
), ),
@ -87,7 +82,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4661246", tmdb_id="4661246",
tvdb_id="10009418", tvdb_id="10009418",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -97,9 +92,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4712059", tmdb_id="4712059",
tvdb_id="10009419", tvdb_id="10009419",
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=240000),
completed=False, time=240000, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -109,7 +102,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4712061", tmdb_id="4712061",
tvdb_id="10009420", tvdb_id="10009420",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
], ],
), ),
@ -130,7 +123,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="3070048", tmdb_id="3070048",
tvdb_id="8438181", tvdb_id="8438181",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -140,7 +133,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4568681", tmdb_id="4568681",
tvdb_id="9829910", tvdb_id="9829910",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -150,7 +143,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4497012", tmdb_id="4497012",
tvdb_id="9870382", tvdb_id="9870382",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
], ],
), ),
@ -177,7 +170,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="295294", tvdb_id="295294",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -187,9 +180,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="295295", tvdb_id="295295",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=300670),
completed=False, time=300670, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -199,7 +190,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="295298", tvdb_id="295298",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
], ],
), ),
@ -220,7 +211,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="9959300", tvdb_id="9959300",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -230,9 +221,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="10009417", tvdb_id="10009417",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=300741),
completed=False, time=300741, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -242,7 +231,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="10009420", tvdb_id="10009420",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
], ],
), ),
@ -263,7 +252,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="8438181", tvdb_id="8438181",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -273,7 +262,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="9829910", tvdb_id="9829910",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -283,7 +272,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="9870382", tvdb_id="9870382",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
], ],
), ),
@ -310,7 +299,7 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="968589", tmdb_id="968589",
tvdb_id="295296", tvdb_id="295296",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -320,9 +309,7 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="968590", tmdb_id="968590",
tvdb_id="295297", tvdb_id="295297",
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=240000),
completed=False, time=240000, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -343,7 +330,7 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="4661246", tmdb_id="4661246",
tvdb_id="10009418", tvdb_id="10009418",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -353,9 +340,7 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="4712059", tmdb_id="4712059",
tvdb_id="10009419", tvdb_id="10009419",
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=240000),
completed=False, time=240000, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -382,7 +367,7 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="295294", tvdb_id="295294",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -392,9 +377,7 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="295295", tvdb_id="295295",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=300670),
completed=False, time=300670, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -415,7 +398,7 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="9959300", tvdb_id="9959300",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -425,9 +408,7 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="10009417", tvdb_id="10009417",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus( status=WatchedStatus(completed=False, time=300741),
completed=False, time=300741, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -445,7 +426,7 @@ movies_watched_list_1: list[MediaItem] = [
tmdb_id="10378", tmdb_id="10378",
tvdb_id="12352", tvdb_id="12352",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -455,7 +436,7 @@ movies_watched_list_1: list[MediaItem] = [
tmdb_id="1029575", tmdb_id="1029575",
tvdb_id="351194", tvdb_id="351194",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -465,7 +446,7 @@ movies_watched_list_1: list[MediaItem] = [
tmdb_id="466420", tmdb_id="466420",
tvdb_id="135852", tvdb_id="135852",
), ),
status=WatchedStatus(completed=False, time=240000, viewed_date=viewed_date), status=WatchedStatus(completed=False, time=240000),
), ),
] ]
@ -481,7 +462,7 @@ movies_watched_list_2: list[MediaItem] = [
tmdb_id="1029575", tmdb_id="1029575",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -491,7 +472,7 @@ movies_watched_list_2: list[MediaItem] = [
tmdb_id="507089", tmdb_id="507089",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -501,7 +482,7 @@ movies_watched_list_2: list[MediaItem] = [
tmdb_id="695721", tmdb_id="695721",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=False, time=301215, viewed_date=viewed_date), status=WatchedStatus(completed=False, time=301215),
), ),
] ]
@ -517,7 +498,7 @@ expected_movie_watched_list_1: list[MediaItem] = [
tmdb_id="10378", tmdb_id="10378",
tvdb_id="12352", tvdb_id="12352",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -527,7 +508,7 @@ expected_movie_watched_list_1: list[MediaItem] = [
tmdb_id="466420", tmdb_id="466420",
tvdb_id="135852", tvdb_id="135852",
), ),
status=WatchedStatus(completed=False, time=240000, viewed_date=viewed_date), status=WatchedStatus(completed=False, time=240000),
), ),
] ]
@ -543,7 +524,7 @@ expected_movie_watched_list_2: list[MediaItem] = [
tmdb_id="507089", tmdb_id="507089",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -553,7 +534,7 @@ expected_movie_watched_list_2: list[MediaItem] = [
tmdb_id="695721", tmdb_id="695721",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=False, time=301215, viewed_date=viewed_date), status=WatchedStatus(completed=False, time=301215),
), ),
] ]
@ -581,7 +562,7 @@ tv_shows_2_watched_list_1: list[Series] = [
tmdb_id="282843", tmdb_id="282843",
tvdb_id="176357", tvdb_id="176357",
), ),
status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date), status=WatchedStatus(completed=True, time=0),
) )
], ],
) )

362
uv.lock
View File

@ -1,97 +1,90 @@
version = 1 version = 1
revision = 2 revision = 1
requires-python = ">=3.12" requires-python = ">=3.12"
[[package]] [[package]]
name = "annotated-types" name = "annotated-types"
version = "0.7.0" version = "0.7.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
] ]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2025.8.3" version = "2025.1.31"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 },
] ]
[[package]] [[package]]
name = "charset-normalizer" name = "charset-normalizer"
version = "3.4.3" version = "3.4.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" }, { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 },
{ url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 },
{ url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 },
{ url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 },
{ url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 },
{ url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 },
{ url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 },
{ url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 },
{ url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 },
{ url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 },
{ url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 },
{ url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 },
{ url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 },
{ url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 },
{ url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 },
{ url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 },
{ url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 },
{ url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 },
{ url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 },
{ url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 },
{ url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 },
{ url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 },
{ url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 },
{ url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 },
{ url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 },
{ url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 },
{ url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 },
{ url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
{ url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
{ url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
{ url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
{ url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
{ url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
{ url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
] ]
[[package]] [[package]]
name = "colorama" name = "colorama"
version = "0.4.6" version = "0.4.6"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
] ]
[[package]] [[package]]
name = "idna" name = "idna"
version = "3.10" version = "3.10"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
] ]
[[package]] [[package]]
name = "iniconfig" name = "iniconfig"
version = "2.1.0" version = "2.0.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
] ]
[[package]] [[package]]
name = "jellyplex-watched" name = "jellyplex-watched"
version = "8.3.0" version = "6.1.2"
source = { virtual = "." } source = { virtual = "." }
dependencies = [ dependencies = [
{ name = "loguru" }, { name = "loguru" },
@ -104,9 +97,7 @@ dependencies = [
[package.dev-dependencies] [package.dev-dependencies]
dev = [ dev = [
{ name = "mypy" },
{ name = "pytest" }, { name = "pytest" },
{ name = "types-requests" },
] ]
lint = [ lint = [
{ name = "ruff" }, { name = "ruff" },
@ -115,20 +106,16 @@ lint = [
[package.metadata] [package.metadata]
requires-dist = [ requires-dist = [
{ name = "loguru", specifier = ">=0.7.3" }, { name = "loguru", specifier = ">=0.7.3" },
{ name = "packaging", specifier = "==25.0" }, { name = "packaging", specifier = "==24.2" },
{ name = "plexapi", specifier = "==4.17.1" }, { name = "plexapi", specifier = "==4.16.1" },
{ name = "pydantic", specifier = "==2.11.7" }, { name = "pydantic", specifier = "==2.10.6" },
{ name = "python-dotenv", specifier = "==1.1.1" }, { name = "python-dotenv", specifier = "==1.0.0" },
{ name = "requests", specifier = "==2.32.5" }, { name = "requests", specifier = "==2.32.3" },
] ]
[package.metadata.requires-dev] [package.metadata.requires-dev]
dev = [ dev = [{ name = "pytest", specifier = ">=8.3.4" }]
{ name = "mypy", specifier = ">=1.16.1" }, lint = [{ name = "ruff", specifier = ">=0.9.6" }]
{ name = "pytest", specifier = ">=8.4.1" },
{ name = "types-requests", specifier = ">=2.32.0.20250611" },
]
lint = [{ name = "ruff", specifier = ">=0.12.3" }]
[[package]] [[package]]
name = "loguru" name = "loguru"
@ -138,185 +125,121 @@ dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" }, { name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "win32-setctime", marker = "sys_platform == 'win32'" }, { name = "win32-setctime", marker = "sys_platform == 'win32'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595 },
]
[[package]]
name = "mypy"
version = "1.18.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mypy-extensions" },
{ name = "pathspec" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/14/a3/931e09fc02d7ba96da65266884da4e4a8806adcdb8a57faaacc6edf1d538/mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9", size = 3448447, upload-time = "2025-09-11T23:00:47.067Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/14/1c3f54d606cb88a55d1567153ef3a8bc7b74702f2ff5eb64d0994f9e49cb/mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9", size = 12911082, upload-time = "2025-09-11T23:00:41.465Z" },
{ url = "https://files.pythonhosted.org/packages/90/83/235606c8b6d50a8eba99773add907ce1d41c068edb523f81eb0d01603a83/mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e", size = 11919107, upload-time = "2025-09-11T22:58:40.903Z" },
{ url = "https://files.pythonhosted.org/packages/ca/25/4e2ce00f8d15b99d0c68a2536ad63e9eac033f723439ef80290ec32c1ff5/mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2", size = 12472551, upload-time = "2025-09-11T22:58:37.272Z" },
{ url = "https://files.pythonhosted.org/packages/32/bb/92642a9350fc339dd9dcefcf6862d171b52294af107d521dce075f32f298/mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d", size = 13340554, upload-time = "2025-09-11T22:59:38.756Z" },
{ url = "https://files.pythonhosted.org/packages/cd/ee/38d01db91c198fb6350025d28f9719ecf3c8f2c55a0094bfbf3ef478cc9a/mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5", size = 13530933, upload-time = "2025-09-11T22:59:20.228Z" },
{ url = "https://files.pythonhosted.org/packages/da/8d/6d991ae631f80d58edbf9d7066e3f2a96e479dca955d9a968cd6e90850a3/mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf", size = 9828426, upload-time = "2025-09-11T23:00:21.007Z" },
{ url = "https://files.pythonhosted.org/packages/e4/ec/ef4a7260e1460a3071628a9277a7579e7da1b071bc134ebe909323f2fbc7/mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f", size = 12918671, upload-time = "2025-09-11T22:58:29.814Z" },
{ url = "https://files.pythonhosted.org/packages/a1/82/0ea6c3953f16223f0b8eda40c1aeac6bd266d15f4902556ae6e91f6fca4c/mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce", size = 11913023, upload-time = "2025-09-11T23:00:29.049Z" },
{ url = "https://files.pythonhosted.org/packages/ae/ef/5e2057e692c2690fc27b3ed0a4dbde4388330c32e2576a23f0302bc8358d/mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e", size = 12473355, upload-time = "2025-09-11T23:00:04.544Z" },
{ url = "https://files.pythonhosted.org/packages/98/43/b7e429fc4be10e390a167b0cd1810d41cb4e4add4ae50bab96faff695a3b/mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71", size = 13346944, upload-time = "2025-09-11T22:58:23.024Z" },
{ url = "https://files.pythonhosted.org/packages/89/4e/899dba0bfe36bbd5b7c52e597de4cf47b5053d337b6d201a30e3798e77a6/mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746", size = 13512574, upload-time = "2025-09-11T22:59:52.152Z" },
{ url = "https://files.pythonhosted.org/packages/f5/f8/7661021a5b0e501b76440454d786b0f01bb05d5c4b125fcbda02023d0250/mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d", size = 9837684, upload-time = "2025-09-11T22:58:44.454Z" },
{ url = "https://files.pythonhosted.org/packages/bf/87/7b173981466219eccc64c107cf8e5ab9eb39cc304b4c07df8e7881533e4f/mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61", size = 12900265, upload-time = "2025-09-11T22:59:03.4Z" },
{ url = "https://files.pythonhosted.org/packages/ae/cc/b10e65bae75b18a5ac8f81b1e8e5867677e418f0dd2c83b8e2de9ba96ebd/mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5", size = 11942890, upload-time = "2025-09-11T23:00:00.607Z" },
{ url = "https://files.pythonhosted.org/packages/39/d4/aeefa07c44d09f4c2102e525e2031bc066d12e5351f66b8a83719671004d/mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8", size = 12472291, upload-time = "2025-09-11T22:59:43.425Z" },
{ url = "https://files.pythonhosted.org/packages/c6/07/711e78668ff8e365f8c19735594ea95938bff3639a4c46a905e3ed8ff2d6/mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d", size = 13318610, upload-time = "2025-09-11T23:00:17.604Z" },
{ url = "https://files.pythonhosted.org/packages/ca/85/df3b2d39339c31d360ce299b418c55e8194ef3205284739b64962f6074e7/mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d", size = 13513697, upload-time = "2025-09-11T22:58:59.534Z" },
{ url = "https://files.pythonhosted.org/packages/b1/df/462866163c99ea73bb28f0eb4d415c087e30de5d36ee0f5429d42e28689b/mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce", size = 9985739, upload-time = "2025-09-11T22:58:51.644Z" },
{ url = "https://files.pythonhosted.org/packages/e0/1d/4b97d3089b48ef3d904c9ca69fab044475bd03245d878f5f0b3ea1daf7ce/mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e", size = 2352212, upload-time = "2025-09-11T22:59:26.576Z" },
]
[[package]]
name = "mypy-extensions"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
] ]
[[package]] [[package]]
name = "packaging" name = "packaging"
version = "25.0" version = "24.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 },
]
[[package]]
name = "pathspec"
version = "0.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
] ]
[[package]] [[package]]
name = "plexapi" name = "plexapi"
version = "4.17.1" version = "4.16.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "requests" }, { name = "requests" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/2a/02/1bebd67c3cd94a45f6c3520da971791b66457535c9771d8e0068746d7bc2/plexapi-4.17.1.tar.gz", hash = "sha256:1e5bfb486bb150e058a80ff4fb9aff9e3efce644c56d52bb5297272e005d8241", size = 154746, upload-time = "2025-08-26T00:11:02.819Z" } sdist = { url = "https://files.pythonhosted.org/packages/66/1c/beeaf8dd489dad13ca4310a7bd9c601da6c6831e7c8ac61a45aadccb742d/plexapi-4.16.1.tar.gz", hash = "sha256:8e62d727e67b69994770196cd83a57783e9194d735aa347f682b4534ce6f0565", size = 153460 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/1c/9fdaa0e1f797dde3c3cb56d7b222109009f70380e7f49fc0ff42d5705409/plexapi-4.17.1-py3-none-any.whl", hash = "sha256:9d51adb112a2b0b7aa91a928c8b5c0dfffc0d51108cea67d86fea08cee06c998", size = 166861, upload-time = "2025-08-26T00:11:00.89Z" }, { url = "https://files.pythonhosted.org/packages/ba/07/20b8a50b8b78374a49685dfe12739c27a9cd440b13913c2cbeeb50470d1e/PlexAPI-4.16.1-py3-none-any.whl", hash = "sha256:87432226c4cd682b5780b01e8def313285c52bdd57c8e72f66a5cef73ce64530", size = 165325 },
] ]
[[package]] [[package]]
name = "pluggy" name = "pluggy"
version = "1.6.0" version = "1.5.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
] ]
[[package]] [[package]]
name = "pydantic" name = "pydantic"
version = "2.11.7" version = "2.10.6"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "annotated-types" }, { name = "annotated-types" },
{ name = "pydantic-core" }, { name = "pydantic-core" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
{ name = "typing-inspection" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 },
] ]
[[package]] [[package]]
name = "pydantic-core" name = "pydantic-core"
version = "2.33.2" version = "2.27.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" }, { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 },
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" }, { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 },
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" }, { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 },
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" }, { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 },
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" }, { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 },
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" }, { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 },
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" }, { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 },
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" }, { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 },
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" }, { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 },
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" }, { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 },
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" }, { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 },
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" }, { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 },
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" }, { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 },
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" }, { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 },
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 },
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 },
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 },
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 },
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 },
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 },
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 },
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 },
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 },
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 },
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 },
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 },
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 },
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 },
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
] ]
[[package]] [[package]]
name = "pytest" name = "pytest"
version = "8.4.2" version = "8.3.4"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" }, { name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" }, { name = "iniconfig" },
{ name = "packaging" }, { name = "packaging" },
{ name = "pluggy" }, { name = "pluggy" },
{ name = "pygments" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 },
] ]
[[package]] [[package]]
name = "python-dotenv" name = "python-dotenv"
version = "1.1.1" version = "1.0.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } sdist = { url = "https://files.pythonhosted.org/packages/31/06/1ef763af20d0572c032fa22882cfbfb005fba6e7300715a37840858c919e/python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba", size = 37399 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, { url = "https://files.pythonhosted.org/packages/44/2f/62ea1c8b593f4e093cc1a7768f0d46112107e790c3e478532329e434f00b/python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a", size = 19482 },
] ]
[[package]] [[package]]
name = "requests" name = "requests"
version = "2.32.5" version = "2.32.3"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "certifi" }, { name = "certifi" },
@ -324,84 +247,59 @@ dependencies = [
{ name = "idna" }, { name = "idna" },
{ name = "urllib3" }, { name = "urllib3" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
] ]
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.13.0" version = "0.9.6"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" } sdist = { url = "https://files.pythonhosted.org/packages/2a/e1/e265aba384343dd8ddd3083f5e33536cd17e1566c41453a5517b5dd443be/ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9", size = 3639454 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" }, { url = "https://files.pythonhosted.org/packages/76/e3/3d2c022e687e18cf5d93d6bfa2722d46afc64eaa438c7fbbdd603b3597be/ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba", size = 11714128 },
{ url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" }, { url = "https://files.pythonhosted.org/packages/e1/22/aff073b70f95c052e5c58153cba735748c9e70107a77d03420d7850710a0/ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504", size = 11682539 },
{ url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" }, { url = "https://files.pythonhosted.org/packages/75/a7/f5b7390afd98a7918582a3d256cd3e78ba0a26165a467c1820084587cbf9/ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83", size = 11132512 },
{ url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" }, { url = "https://files.pythonhosted.org/packages/a6/e3/45de13ef65047fea2e33f7e573d848206e15c715e5cd56095589a7733d04/ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc", size = 11929275 },
{ url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" }, { url = "https://files.pythonhosted.org/packages/7d/f2/23d04cd6c43b2e641ab961ade8d0b5edb212ecebd112506188c91f2a6e6c/ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b", size = 11466502 },
{ url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" }, { url = "https://files.pythonhosted.org/packages/b5/6f/3a8cf166f2d7f1627dd2201e6cbc4cb81f8b7d58099348f0c1ff7b733792/ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e", size = 12676364 },
{ url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" }, { url = "https://files.pythonhosted.org/packages/f5/c4/db52e2189983c70114ff2b7e3997e48c8318af44fe83e1ce9517570a50c6/ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666", size = 13335518 },
{ url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" }, { url = "https://files.pythonhosted.org/packages/66/44/545f8a4d136830f08f4d24324e7db957c5374bf3a3f7a6c0bc7be4623a37/ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5", size = 12823287 },
{ url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" }, { url = "https://files.pythonhosted.org/packages/c5/26/8208ef9ee7431032c143649a9967c3ae1aae4257d95e6f8519f07309aa66/ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5", size = 14592374 },
{ url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" }, { url = "https://files.pythonhosted.org/packages/31/70/e917781e55ff39c5b5208bda384fd397ffd76605e68544d71a7e40944945/ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217", size = 12500173 },
{ url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" }, { url = "https://files.pythonhosted.org/packages/84/f5/e4ddee07660f5a9622a9c2b639afd8f3104988dc4f6ba0b73ffacffa9a8c/ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6", size = 11906555 },
{ url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" }, { url = "https://files.pythonhosted.org/packages/f1/2b/6ff2fe383667075eef8656b9892e73dd9b119b5e3add51298628b87f6429/ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897", size = 11538958 },
{ url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" }, { url = "https://files.pythonhosted.org/packages/3c/db/98e59e90de45d1eb46649151c10a062d5707b5b7f76f64eb1e29edf6ebb1/ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08", size = 12117247 },
{ url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" }, { url = "https://files.pythonhosted.org/packages/ec/bc/54e38f6d219013a9204a5a2015c09e7a8c36cedcd50a4b01ac69a550b9d9/ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656", size = 12554647 },
{ url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" }, { url = "https://files.pythonhosted.org/packages/a5/7d/7b461ab0e2404293c0627125bb70ac642c2e8d55bf590f6fce85f508f1b2/ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d", size = 9949214 },
{ url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" }, { url = "https://files.pythonhosted.org/packages/ee/30/c3cee10f915ed75a5c29c1e57311282d1a15855551a64795c1b2bbe5cf37/ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa", size = 10999914 },
{ url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" }, { url = "https://files.pythonhosted.org/packages/e8/a8/d71f44b93e3aa86ae232af1f2126ca7b95c0f515ec135462b3e1f351441c/ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a", size = 10177499 },
{ url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" },
]
[[package]]
name = "types-requests"
version = "2.32.4.20250809"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" },
] ]
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
version = "4.15.0" version = "4.12.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
]
[[package]]
name = "typing-inspection"
version = "0.4.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
] ]
[[package]] [[package]]
name = "urllib3" name = "urllib3"
version = "2.5.0" version = "2.3.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
] ]
[[package]] [[package]]
name = "win32-setctime" name = "win32-setctime"
version = "1.2.0" version = "1.2.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" } sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867 }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083 },
] ]