Compare commits


39 Commits
v7.0.4 ... main

Author SHA1 Message Date
Bel LaPointe a1adf97bfb no dry 2025-09-25 15:56:10 -04:00
Bel LaPointe 8dea290dd1 nosqueaky 2025-09-25 15:00:14 -04:00
Bel LaPointe 90358f0176 allowlist 2025-09-25 14:55:50 -04:00
Bel LaPointe 96239d6704 runs 2025-09-25 14:55:11 -04:00
Bel LaPointe e8e6d6469f at least it bombs now 2025-09-25 14:40:30 -04:00
Bel LaPointe 0dc338df6f quote 2025-09-25 14:36:56 -04:00
Bel LaPointe 72df1cd0ad env 2025-09-25 14:36:06 -04:00
Bel LaPointe 8af7fe00f9 me 2025-09-25 14:34:41 -04:00
Luis Garcia 6eefedfc40 Tag version 8.3.0
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-09-11 17:43:32 -06:00
Luigi311 2914dbb81c
Merge pull request #311 from luigi311/deps
Update dependencies
2025-09-11 17:42:39 -06:00
Luis Garcia 52c780d8a7 Update dependencies
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-09-11 17:39:18 -06:00
Luigi311 0276e7c8eb
Merge pull request #309 from luigi311/pathlib
Utilize pathlib for universal location file extraction
2025-09-11 17:31:28 -06:00
Luis Garcia bf50defcb5 Use pathlib to extract file/folder to fix windows paths
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-09-10 21:37:17 -06:00
Luigi311 71d753878e
Merge pull request #298 from luigi311/identifies_logging
Identifies logging
2025-07-15 01:09:07 -06:00
Luis Garcia 21c530d956 Plex: Log missing identifiers information
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-14 23:10:06 +00:00
Luis Garcia 142c9df6e9 Jellyfin/Emby: Log more missing identifiers information
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-14 22:53:00 +00:00
Luis Garcia 629f50ecdc Tag version 8.2.0
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-14 19:10:36 +00:00
Luigi311 3e2450b5fd
Merge pull request #296 from luigi311/fix_emby
Jellyfin/Emby: Add fallback for played percentage if missing
2025-07-14 13:09:58 -06:00
Luis Garcia 0de5e86837 Jellyfin/Emby: Add fallback for played percentage if missing
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-14 17:21:55 +00:00
Luis Garcia 33a719f693 Tag version 8.1.0
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-12 00:17:46 +00:00
Luigi311 9ff985a848
Merge pull request #292 from luigi311/sync_timestamps
Jellyfin/Emby: Sync across the view times
2025-07-11 18:16:56 -06:00
Luis Garcia 5501e21aa8 Jellyfin/Emby: Use the same endpoint for marking as for partials to fix emby
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-12 00:14:27 +00:00
Luis Garcia 2208d91d07 README: Add sync view dates
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 23:48:33 +00:00
Luis Garcia 75f7f576ac Jellyfin/Emby: Sync across the view times
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 23:48:23 +00:00
Luis Garcia 24f56769f9 Tag version 8.0.0
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 18:31:19 +00:00
Luigi311 29e4f224dc
Merge pull request #233 from luigi311/reuse_server1
Reuse server_1_watched
2025-07-11 12:29:35 -06:00
Luis Garcia bdb58918e7 Jellyfin/Emby: Log missing identifiers to debug
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 18:06:20 +00:00
Luis Garcia c3be980eea Reuse server_1_watched history to avoid duplication
Keeps the server_1_watched history that way it does not need to fetch
the same results again each time it needs to sync to another server

Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 17:35:53 +00:00
Luis Garcia c1a26dd73b README: Add ENV_FILE option
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 17:01:59 +00:00
Luigi311 e5d5f11f33
Merge pull request #291 from luigi311/deps
Deps: Update deps
2025-07-11 10:50:28 -06:00
Luis Garcia 616ca92d5e Deps: Update deps
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 16:43:55 +00:00
Luigi311 b2b214c987
Merge pull request #290 from luigi311/docker_push
CI: Simplify docker
2025-07-11 10:40:58 -06:00
Luis Garcia 07542b498e CI: UV sync frozen and no extra tools
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 10:37:26 -06:00
Luis Garcia 9e53c0f8e2 CI: Simplify docker build push
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-07-11 10:32:52 -06:00
Luigi311 98266de678
Merge pull request #279 from luigi311/env_file
Add support for env file support, set via ENV_FILE
2025-07-11 10:24:40 -06:00
Luis Garcia 9d4f3dd432 Move generate locations/guids to the class level
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-06-05 17:04:05 -06:00
Luis Garcia cc9b84fefa Update .env.sample
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-05-18 20:38:19 +00:00
Luis Garcia c76bb3b355 CI: Use ENV_FILE
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-05-18 20:25:13 +00:00
Luis Garcia 544649effd Add support for env file support, set via ENV_FILE
Signed-off-by: Luis Garcia <git@luigi311.com>
2025-05-18 20:25:13 +00:00
17 changed files with 1100 additions and 551 deletions

.env Normal file · 117 additions

@ -0,0 +1,117 @@
# Global Settings
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "False"
## Debugging level, "info" is default, "debug" is more verbose
#DEBUG_LEVEL = "DEBUG"
DEBUG_LEVEL = "INFO"
## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "True"
## How often to run the script in seconds
SLEEP_DURATION = "60"
## Log file where all output will be written to
LOG_FILE = "/mnt/log.log"
## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "/mnt/mark.log"
## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300
## Max threads for processing
MAX_THREADS = 1
## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files then you can enable only generate locations
## If media servers are using different files then you can enable only generate guids
## Default is to generate both
GENERATE_GUIDS = "True"
GENERATE_LOCATIONS = "True"
## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
# jellyfin: plex,plex
#USER_MAPPING = { "belandbroc": "debila,belan49", "debila,belan49": "belandbroc", "debila": "belandbroc", "belan49": "belandbroc" }
USER_MAPPING = { "belandbroc":"debila", "debila":"belandbroc", "debila":"belandbroc" }
## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "TV Shows": "Scratch TV Shows", "Scratch TV Shows": "TV Shows" }
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
WHITELIST_LIBRARY = "TV Shows,Scratch TV Shows,Movies"
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "belandbroc,debila"
# Plex
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://192.168.0.86:32400"
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
# PLEX_TOKEN = "vPGyuy6zWVCz6ZFyy8x1"
# # debila=debilapointe@gmail
PLEX_TOKEN = "S7gbVzAzH4ypN-4K7ta5"
# me
## If not using plex token then use username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "squeaky2x3@gmail.com"
#PLEX_PASSWORD = "qoDuGNsGsWRurOd5QFdRy2@"
#PLEX_SERVERNAME = "Scratch"
## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "True"
# Jellyfin
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "https://jellyfin.home.blapointe.com"
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "1dc766ce6ca44c53b773263a06889b96"
# # Emby
#
# ## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
# ## Comma seperated list for multiple servers
# EMBY_BASEURL = "http://localhost:8097"
#
# ## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
# ## Comma seperated list for multiple servers
# EMBY_TOKEN = "SuperSecretToken"
# Syncing Options
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_PLEX_TO_PLEX = "False"
#SYNC_FROM_PLEX_TO_EMBY = "False"
SYNC_FROM_JELLYFIN_TO_PLEX = "False"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
#SYNC_FROM_JELLYFIN_TO_EMBY = "False"
#SYNC_FROM_EMBY_TO_PLEX = "False"
#SYNC_FROM_EMBY_TO_JELLYFIN = "False"
#SYNC_FROM_EMBY_TO_EMBY = "False"
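The mapping values and on/off flags above are plain strings in the dotenv file; main.py (diffed further down) lowercases USER_MAPPING and runs both mappings through json.loads, while flags go through str_to_bool. A minimal sketch of that parsing, trimmed to two mapping pairs:

```python
import json

# Strings standing in for values read from the .env file above.
user_mapping_env = '{ "belandbroc": "debila", "debila": "belandbroc" }'
dryrun_env = "False"

# main.py lowercases USER_MAPPING before json.loads, so names compare case-insensitively.
user_mapping = json.loads(user_mapping_env.lower())

# str_to_bool (src/functions.py) accepts y/yes/t/true/on/1 as True; anything else is False.
def str_to_bool(value: str | None) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")

dryrun = str_to_bool(dryrun_env)
print(user_mapping, dryrun)  # {'belandbroc': 'debila', 'debila': 'belandbroc'} False
```

Because json.loads keeps only the last value for a repeated key, the duplicated "debila" entry in the USER_MAPPING line above collapses to a single pair.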

.env.sample

@ -3,11 +3,8 @@
## Do not mark any shows/movies as played and instead just output to log if they would of been marked. ## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True" DRYRUN = "True"
## Additional logging information
DEBUG = "False"
## Debugging level, "info" is default, "debug" is more verbose ## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info" DEBUG_LEVEL = "INFO"
## If set to true then the script will only run once and then exit ## If set to true then the script will only run once and then exit
RUN_ONLY_ONCE = "False" RUN_ONLY_ONCE = "False"
@ -16,7 +13,7 @@ RUN_ONLY_ONCE = "False"
SLEEP_DURATION = "3600" SLEEP_DURATION = "3600"
## Log file where all output will be written to ## Log file where all output will be written to
LOGFILE = "log.log" LOG_FILE = "log.log"
## Mark file where all shows/movies that have been marked as played will be written to ## Mark file where all shows/movies that have been marked as played will be written to
MARK_FILE = "mark.log" MARK_FILE = "mark.log"
@ -24,26 +21,24 @@ MARK_FILE = "mark.log"
## Timeout for requests for jellyfin ## Timeout for requests for jellyfin
REQUEST_TIMEOUT = 300 REQUEST_TIMEOUT = 300
## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using same files on multiple servers
GENERATE_GUIDS = "True"
## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"
## Max threads for processing ## Max threads for processing
MAX_THREADS = 2 MAX_THREADS = 1
## Generate guids/locations
## These are slow processes, so this is a way to speed things up
## If media servers are using the same files then you can enable only generate locations
## If media servers are using different files then you can enable only generate guids
## Default is to generate both
GENERATE_GUIDS = "True"
GENERATE_LOCATIONS = "True"
## Map usernames between servers in the event that they are different, order does not matter ## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options ## Comma separated for multiple options
#USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" } USER_MAPPING = { "Username": "User", "Second User": "User Dos" }
## Map libraries between servers in the event that they are different, order does not matter ## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options ## Comma separated for multiple options
#LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" } LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded. ## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options ## Comma separated for multiple options
@ -52,7 +47,7 @@ MAX_THREADS = 2
#BLACKLIST_LIBRARY_TYPE = "" #BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = "" #WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = "" #BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2" #WHITELIST_USERS = ""
# Plex # Plex
@ -96,7 +91,7 @@ EMBY_BASEURL = "http://localhost:8097"
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key ## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
## Comma seperated list for multiple servers ## Comma seperated list for multiple servers
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515" EMBY_TOKEN = "SuperSecretToken"
# Syncing Options # Syncing Options

.github/workflows/ (CI workflow)

@ -20,7 +20,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install uv - name: Install uv
uses: astral-sh/setup-uv@v5 uses: astral-sh/setup-uv@v6
- name: "Set up Python" - name: "Set up Python"
uses: actions/setup-python@v5 uses: actions/setup-python@v5
@ -28,7 +28,7 @@ jobs:
python-version-file: ".python-version" python-version-file: ".python-version"
- name: "Install dependencies" - name: "Install dependencies"
run: uv sync --all-extras --dev run: uv sync --frozen
- name: "Run tests" - name: "Run tests"
run: uv run pytest -vvv run: uv run pytest -vvv
@ -39,7 +39,7 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Install uv - name: Install uv
uses: astral-sh/setup-uv@v5 uses: astral-sh/setup-uv@v6
- name: "Set up Python" - name: "Set up Python"
uses: actions/setup-python@v5 uses: actions/setup-python@v5
@ -48,7 +48,7 @@ jobs:
- name: "Install dependencies" - name: "Install dependencies"
run: | run: |
uv sync --all-extras --dev uv sync --frozen
sudo apt update && sudo apt install -y docker-compose sudo apt update && sudo apt install -y docker-compose
- name: "Checkout JellyPlex-Watched-CI" - name: "Checkout JellyPlex-Watched-CI"
@ -70,40 +70,35 @@ jobs:
- name: "Test Plex" - name: "Test Plex"
run: | run: |
mv test/ci_plex.env .env ENV_FILE="test/ci_plex.env" uv run main.py
uv run main.py
uv run test/validate_ci_marklog.py --plex uv run test/validate_ci_marklog.py --plex
rm mark.log rm mark.log
- name: "Test Jellyfin" - name: "Test Jellyfin"
run: | run: |
mv test/ci_jellyfin.env .env ENV_FILE="test/ci_jellyfin.env" uv run main.py
uv run main.py
uv run test/validate_ci_marklog.py --jellyfin uv run test/validate_ci_marklog.py --jellyfin
rm mark.log rm mark.log
- name: "Test Emby" - name: "Test Emby"
run: | run: |
mv test/ci_emby.env .env ENV_FILE="test/ci_emby.env" uv run main.py
uv run main.py
uv run test/validate_ci_marklog.py --emby uv run test/validate_ci_marklog.py --emby
rm mark.log rm mark.log
- name: "Test Guids" - name: "Test Guids"
run: | run: |
mv test/ci_guids.env .env ENV_FILE="test/ci_guids.env" uv run main.py
uv run main.py
uv run test/validate_ci_marklog.py --guids uv run test/validate_ci_marklog.py --guids
rm mark.log rm mark.log
- name: "Test Locations" - name: "Test Locations"
run: | run: |
mv test/ci_locations.env .env ENV_FILE="test/ci_locations.env" uv run main.py
uv run main.py
uv run test/validate_ci_marklog.py --locations uv run test/validate_ci_marklog.py --locations
rm mark.log rm mark.log
@ -111,11 +106,10 @@ jobs:
- name: "Test writing to the servers" - name: "Test writing to the servers"
run: | run: |
# Test writing to the servers # Test writing to the servers
mv test/ci_write.env .env ENV_FILE="test/ci_write.env" uv run main.py
uv run main.py
# Test again to test if it can handle existing data # Test again to test if it can handle existing data
uv run main.py ENV_FILE="test/ci_write.env" uv run main.py
uv run test/validate_ci_marklog.py --write uv run test/validate_ci_marklog.py --write
@ -178,6 +172,7 @@ jobs:
env: env:
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
if: "${{ env.DOCKER_USERNAME != '' }}" if: "${{ env.DOCKER_USERNAME != '' }}"
id: docker_login
uses: docker/login-action@v3 uses: docker/login-action@v3
with: with:
username: ${{ secrets.DOCKER_USERNAME }} username: ${{ secrets.DOCKER_USERNAME }}
@ -191,26 +186,14 @@ jobs:
username: ${{ github.actor }} username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }} password: ${{ secrets.GITHUB_TOKEN }}
- name: Build
id: build
if: "${{ steps.docker_meta.outputs.tags == '' }}"
uses: docker/build-push-action@v5
with:
context: .
file: ${{ matrix.dockerfile }}
platforms: linux/amd64,linux/arm64
push: false
tags: jellyplex-watched:action
- name: Build Push - name: Build Push
id: build_push id: build_push
if: "${{ steps.docker_meta.outputs.tags != '' }}" uses: docker/build-push-action@v6
uses: docker/build-push-action@v5
with: with:
context: . context: .
file: ${{ matrix.dockerfile }} file: ${{ matrix.dockerfile }}
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
push: true push: ${{ steps.docker_login.outcome == 'success' }}
tags: ${{ steps.docker_meta.outputs.tags }} tags: ${{ steps.docker_meta.outputs.tags }}
labels: ${{ steps.docker_meta.outputs.labels }} labels: ${{ steps.docker_meta.outputs.labels }}

README.md

@ -19,6 +19,7 @@ Keep in sync all your users watched history between jellyfin, plex and emby serv
- \[x] One way/multi way sync - \[x] One way/multi way sync
- \[x] Sync watched - \[x] Sync watched
- \[x] Sync in progress - \[x] Sync in progress
- \[ ] Sync view dates
### Jellyfin ### Jellyfin
@ -29,6 +30,8 @@ Keep in sync all your users watched history between jellyfin, plex and emby serv
- \[x] One way/multi way sync - \[x] One way/multi way sync
- \[x] Sync watched - \[x] Sync watched
- \[x] Sync in progress - \[x] Sync in progress
- \[x] Sync view dates
### Emby ### Emby
@ -39,6 +42,8 @@ Keep in sync all your users watched history between jellyfin, plex and emby serv
- \[x] One way/multi way sync - \[x] One way/multi way sync
- \[x] Sync watched - \[x] Sync watched
- \[x] Sync in progress - \[x] Sync in progress
- \[x] Sync view dates
## Configuration ## Configuration
@ -50,7 +55,7 @@ Full list of configuration options can be found in the [.env.sample](.env.sample
- [Install uv](https://docs.astral.sh/uv/getting-started/installation/) - [Install uv](https://docs.astral.sh/uv/getting-started/installation/)
- Create a .env file similar to .env.sample; fill in baseurls and tokens, **remember to uncomment anything you wish to use** (e.g., user mapping, library mapping, black/whitelist, etc.) - Create a .env file similar to .env.sample; fill in baseurls and tokens, **remember to uncomment anything you wish to use** (e.g., user mapping, library mapping, black/whitelist, etc.). If you want to store your .env file anywhere else or under a different name you can use ENV_FILE variable to specify the location.
- Run - Run
@ -58,6 +63,10 @@ Full list of configuration options can be found in the [.env.sample](.env.sample
uv run main.py uv run main.py
``` ```
```bash
ENV_FILE="Test.env" uv run main.py
```
### Docker ### Docker
- Build docker image - Build docker image

entrypoint.sh Normal file → Executable file · 0 line changes (mode change only)

pyproject.toml

@ -1,24 +1,24 @@
[project] [project]
name = "jellyplex-watched" name = "jellyplex-watched"
version = "7.0.4" version = "8.3.0"
description = "Sync watched between media servers locally" description = "Sync watched between media servers locally"
readme = "README.md" readme = "README.md"
requires-python = ">=3.12" requires-python = ">=3.12"
dependencies = [ dependencies = [
"loguru>=0.7.3", "loguru>=0.7.3",
"packaging==25.0", "packaging==25.0",
"plexapi==4.17.0", "plexapi==4.17.1",
"pydantic==2.11.4", "pydantic==2.11.7",
"python-dotenv==1.1.0", "python-dotenv==1.1.1",
"requests==2.32.3", "requests==2.32.5",
] ]
[dependency-groups] [dependency-groups]
lint = [ lint = [
"ruff>=0.11.10", "ruff>=0.12.3",
] ]
dev = [ dev = [
"mypy>=1.15.0", "mypy>=1.16.1",
"pytest>=8.3.5", "pytest>=8.4.1",
"types-requests>=2.32.0.20250515", "types-requests>=2.32.0.20250611",
] ]

run.sh Normal file · 10 additions

@ -0,0 +1,10 @@
#! /usr/bin/env bash
d=/tmp/jellyplex.d
mkdir -p $d
docker run --rm -it -v "$d":/mnt $(
if [ "${PWD##*/}" == JellyPlex-Watched ]; then
echo "-v $PWD/src:/app/src"
fi
) -v $PWD/.env:/app/.env \
luigi311/jellyplex-watched:latest

src/connection.py

@ -1,18 +1,17 @@
import os
from typing import Literal from typing import Literal
from dotenv import load_dotenv
from loguru import logger from loguru import logger
from src.functions import str_to_bool from src.functions import str_to_bool, get_env_value
from src.plex import Plex from src.plex import Plex
from src.jellyfin import Jellyfin from src.jellyfin import Jellyfin
from src.emby import Emby from src.emby import Emby
load_dotenv(override=True)
def jellyfin_emby_server_connection( def jellyfin_emby_server_connection(
server_baseurl: str, server_token: str, server_type: Literal["jellyfin", "emby"] env,
server_baseurl: str,
server_token: str,
server_type: Literal["jellyfin", "emby"],
) -> list[Jellyfin | Emby]: ) -> list[Jellyfin | Emby]:
servers: list[Jellyfin | Emby] = [] servers: list[Jellyfin | Emby] = []
server: Jellyfin | Emby server: Jellyfin | Emby
@ -31,11 +30,13 @@ def jellyfin_emby_server_connection(
base_url = base_url[:-1] base_url = base_url[:-1]
if server_type == "jellyfin": if server_type == "jellyfin":
server = Jellyfin(base_url=base_url, token=server_tokens[i].strip()) server = Jellyfin(
env=env, base_url=base_url, token=server_tokens[i].strip()
)
servers.append(server) servers.append(server)
elif server_type == "emby": elif server_type == "emby":
server = Emby(base_url=base_url, token=server_tokens[i].strip()) server = Emby(env=env, base_url=base_url, token=server_tokens[i].strip())
servers.append(server) servers.append(server)
else: else:
raise Exception("Unknown server type") raise Exception("Unknown server type")
@ -45,16 +46,17 @@ def jellyfin_emby_server_connection(
return servers return servers
def generate_server_connections() -> list[Plex | Jellyfin | Emby]: def generate_server_connections(env) -> list[Plex | Jellyfin | Emby]:
servers: list[Plex | Jellyfin | Emby] = [] servers: list[Plex | Jellyfin | Emby] = []
plex_baseurl_str: str | None = os.getenv("PLEX_BASEURL", None) plex_baseurl_str: str | None = get_env_value(env, "PLEX_BASEURL", None)
plex_token_str: str | None = os.getenv("PLEX_TOKEN", None) plex_token_str: str | None = get_env_value(env, "PLEX_TOKEN", None)
plex_username_str: str | None = os.getenv("PLEX_USERNAME", None) plex_username_str: str | None = get_env_value(env, "PLEX_USERNAME", None)
plex_password_str: str | None = os.getenv("PLEX_PASSWORD", None) plex_password_str: str | None = get_env_value(env, "PLEX_PASSWORD", None)
plex_servername_str: str | None = os.getenv("PLEX_SERVERNAME", None) plex_servername_str: str | None = get_env_value(env, "PLEX_SERVERNAME", None)
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False")) ssl_bypass = str_to_bool(get_env_value(env, "SSL_BYPASS", "False"))
print(f"if plex_baseurl_str={plex_baseurl_str} and plex_token_str={plex_token_str}")
if plex_baseurl_str and plex_token_str: if plex_baseurl_str and plex_token_str:
plex_baseurl = plex_baseurl_str.split(",") plex_baseurl = plex_baseurl_str.split(",")
plex_token = plex_token_str.split(",") plex_token = plex_token_str.split(",")
@ -65,7 +67,9 @@ def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
) )
for i, url in enumerate(plex_baseurl): for i, url in enumerate(plex_baseurl):
print(f"Plex({url.strip()}, {plex_token[i].strip()})")
server = Plex( server = Plex(
env,
base_url=url.strip(), base_url=url.strip(),
token=plex_token[i].strip(), token=plex_token[i].strip(),
user_name=None, user_name=None,
@ -92,6 +96,7 @@ def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
for i, username in enumerate(plex_username): for i, username in enumerate(plex_username):
server = Plex( server = Plex(
env,
base_url=None, base_url=None,
token=None, token=None,
user_name=username.strip(), user_name=username.strip(),
@ -103,20 +108,20 @@ def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
logger.debug(f"Plex Server {i} info: {server.info()}") logger.debug(f"Plex Server {i} info: {server.info()}")
servers.append(server) servers.append(server)
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None) jellyfin_baseurl = get_env_value(env, "JELLYFIN_BASEURL", None)
jellyfin_token = os.getenv("JELLYFIN_TOKEN", None) jellyfin_token = get_env_value(env, "JELLYFIN_TOKEN", None)
if jellyfin_baseurl and jellyfin_token: if jellyfin_baseurl and jellyfin_token:
servers.extend( servers.extend(
jellyfin_emby_server_connection( jellyfin_emby_server_connection(
jellyfin_baseurl, jellyfin_token, "jellyfin" env, jellyfin_baseurl, jellyfin_token, "jellyfin"
) )
) )
emby_baseurl = os.getenv("EMBY_BASEURL", None) emby_baseurl = get_env_value(env, "EMBY_BASEURL", None)
emby_token = os.getenv("EMBY_TOKEN", None) emby_token = get_env_value(env, "EMBY_TOKEN", None)
if emby_baseurl and emby_token: if emby_baseurl and emby_token:
servers.extend( servers.extend(
jellyfin_emby_server_connection(emby_baseurl, emby_token, "emby") jellyfin_emby_server_connection(env, emby_baseurl, emby_token, "emby")
) )
return servers return servers
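As a rough illustration of how generate_server_connections() above pairs the comma-separated PLEX_BASEURL and PLEX_TOKEN lists by index, here is a standalone sketch; the URLs and tokens are placeholders, and the commented-out Plex(...) call stands in for src.plex.Plex:

```python
# Placeholder env dict; the real code receives the dict produced by dotenv_values(ENV_FILE).
env = {
    "PLEX_BASEURL": "http://plex-one:32400, http://plex-two:32400",
    "PLEX_TOKEN": "token-one, token-two",
}

plex_baseurl = env["PLEX_BASEURL"].split(",")
plex_token = env["PLEX_TOKEN"].split(",")

# Assumption: mismatched list lengths should be treated as a configuration error.
if len(plex_baseurl) != len(plex_token):
    raise SystemExit("PLEX_BASEURL and PLEX_TOKEN must have the same number of entries")

for i, url in enumerate(plex_baseurl):
    # server = Plex(env, base_url=url.strip(), token=plex_token[i].strip(), ...)
    print(f"Plex(base_url={url.strip()!r}, token={plex_token[i].strip()!r})")
```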

src/emby.py

@ -4,7 +4,7 @@ from loguru import logger
class Emby(JellyfinEmby): class Emby(JellyfinEmby):
def __init__(self, base_url: str, token: str) -> None: def __init__(self, env, base_url: str, token: str) -> None:
authorization = ( authorization = (
"Emby , " "Emby , "
'Client="JellyPlex-Watched", ' 'Client="JellyPlex-Watched", '
@ -19,7 +19,7 @@ class Emby(JellyfinEmby):
} }
super().__init__( super().__init__(
server_type="Emby", base_url=base_url, token=token, headers=headers env, server_type="Emby", base_url=base_url, token=token, headers=headers
) )
def is_partial_update_supported(self, server_version: Version) -> bool: def is_partial_update_supported(self, server_version: Version) -> bool:

src/functions.py

@ -2,11 +2,11 @@ import os
from concurrent.futures import Future, ThreadPoolExecutor from concurrent.futures import Future, ThreadPoolExecutor
from typing import Any, Callable from typing import Any, Callable
from dotenv import load_dotenv from dotenv import load_dotenv
import re
from pathlib import PureWindowsPath, PurePosixPath
load_dotenv(override=True) load_dotenv(override=True)
mark_file = os.getenv("MARK_FILE", os.getenv("MARKFILE", "mark.log"))
def log_marked( def log_marked(
server_type: str, server_type: str,
@ -16,6 +16,7 @@ def log_marked(
movie_show: str, movie_show: str,
episode: str | None = None, episode: str | None = None,
duration: float | None = None, duration: float | None = None,
mark_file: str = "mark.log",
) -> None: ) -> None:
output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}" output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}"
@ -29,9 +30,18 @@ def log_marked(
file.write(output + "\n") file.write(output + "\n")
def get_env_value(env, key: str, default: Any = None):
if env and key in env:
return env[key]
elif os.getenv(key):
return os.getenv(key)
else:
return default
# Reimplementation of distutils.util.strtobool due to it being deprecated # Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668 # Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: str) -> bool: def str_to_bool(value: str | None) -> bool:
if not value: if not value:
return False return False
return str(value).lower() in ("y", "yes", "t", "true", "on", "1") return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
@ -73,13 +83,13 @@ def future_thread_executor(
args: list[tuple[Callable[..., Any], ...]], args: list[tuple[Callable[..., Any], ...]],
threads: int | None = None, threads: int | None = None,
override_threads: bool = False, override_threads: bool = False,
max_threads: int | None = None,
) -> list[Any]: ) -> list[Any]:
results: list[Any] = [] results: list[Any] = []
# Determine the number of workers, defaulting to 1 if os.cpu_count() returns None # Determine the number of workers, defaulting to 1 if os.cpu_count() returns None
max_threads_env: int = int(os.getenv("MAX_THREADS", 32))
cpu_threads: int = os.cpu_count() or 1 # Default to 1 if os.cpu_count() is None cpu_threads: int = os.cpu_count() or 1 # Default to 1 if os.cpu_count() is None
workers: int = min(max_threads_env, cpu_threads * 2) workers: int = min(max_threads, cpu_threads * 2) if max_threads else cpu_threads * 2
# Adjust workers based on threads parameter and override_threads flag # Adjust workers based on threads parameter and override_threads flag
if threads is not None: if threads is not None:
@ -116,3 +126,13 @@ def parse_string_to_list(string: str | None) -> list[str]:
output = string.split(",") output = string.split(",")
return output return output
_WINDOWS_DRIVE = re.compile(r"^[A-Za-z]:") # e.g. C: D:
def filename_from_any_path(p: str) -> str:
# Windows-y if UNC (\\server\share), drive letter, or has backslashes
if p.startswith("\\\\") or _WINDOWS_DRIVE.match(p) or ("\\" in p and "/" not in p):
return PureWindowsPath(p).name
return PurePosixPath(p).name
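For reference, a self-contained usage sketch of the two helpers added above, get_env_value and filename_from_any_path; the env dict and media paths are made-up examples:

```python
import os
import re
from pathlib import PureWindowsPath, PurePosixPath
from typing import Any

def get_env_value(env, key: str, default: Any = None):
    # Prefer the dotenv dict, fall back to the process environment, then the default.
    if env and key in env:
        return env[key]
    elif os.getenv(key):
        return os.getenv(key)
    else:
        return default

_WINDOWS_DRIVE = re.compile(r"^[A-Za-z]:")  # e.g. C: D:

def filename_from_any_path(p: str) -> str:
    # Treat UNC shares, drive letters, or backslash-only paths as Windows paths.
    if p.startswith("\\\\") or _WINDOWS_DRIVE.match(p) or ("\\" in p and "/" not in p):
        return PureWindowsPath(p).name
    return PurePosixPath(p).name

env = {"MARK_FILE": "/mnt/mark.log"}                        # placeholder dotenv dict
print(get_env_value(env, "MARK_FILE", "mark.log"))          # /mnt/mark.log (from the dict)
print(get_env_value(env, "REQUEST_TIMEOUT", 300))           # 300 (default, key not set anywhere)
print(filename_from_any_path(r"C:\Media\Show\S01E01.mkv"))  # S01E01.mkv
print(filename_from_any_path("/media/Show/S01E01.mkv"))     # S01E01.mkv
```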

src/jellyfin.py

@ -4,7 +4,7 @@ from loguru import logger
class Jellyfin(JellyfinEmby): class Jellyfin(JellyfinEmby):
def __init__(self, base_url: str, token: str) -> None: def __init__(self, env, base_url: str, token: str) -> None:
authorization = ( authorization = (
"MediaBrowser , " "MediaBrowser , "
'Client="JellyPlex-Watched", ' 'Client="JellyPlex-Watched", '
@ -19,7 +19,7 @@ class Jellyfin(JellyfinEmby):
} }
super().__init__( super().__init__(
server_type="Jellyfin", base_url=base_url, token=token, headers=headers env, server_type="Jellyfin", base_url=base_url, token=token, headers=headers
) )
def is_partial_update_supported(self, server_version: Version) -> bool: def is_partial_update_supported(self, server_version: Version) -> bool:

src/jellyfin_emby.py

@ -1,18 +1,19 @@
# Functions for Jellyfin and Emby # Functions for Jellyfin and Emby
from datetime import datetime
import requests import requests
import traceback import traceback
import os
from math import floor from math import floor
from typing import Any, Literal from typing import Any, Literal
from dotenv import load_dotenv
from packaging.version import parse, Version from packaging.version import parse, Version
from loguru import logger from loguru import logger
from src.functions import ( from src.functions import (
filename_from_any_path,
search_mapping, search_mapping,
log_marked, log_marked,
str_to_bool, str_to_bool,
get_env_value,
) )
from src.watched import ( from src.watched import (
LibraryData, LibraryData,
@ -24,44 +25,45 @@ from src.watched import (
check_same_identifiers, check_same_identifiers,
) )
load_dotenv(override=True)
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
def extract_identifiers_from_item( def extract_identifiers_from_item(
server_type: str, item: dict[str, Any] server_type: str,
item: dict[str, Any],
generate_guids: bool,
generate_locations: bool,
) -> MediaIdentifiers: ) -> MediaIdentifiers:
title = item.get("Name") title = item.get("Name")
id = None id = None
if not title: if not title:
id = item.get("Id") id = item.get("Id")
logger.info(f"{server_type}: Name not found for {id}") logger.debug(f"{server_type}: Name not found for {id}")
guids = {} guids = {}
if generate_guids: if generate_guids:
guids = {k.lower(): v for k, v in item.get("ProviderIds", {}).items()} guids = {k.lower(): v for k, v in item.get("ProviderIds", {}).items()}
if not guids:
logger.info(
f"{server_type}: {title if title else id} has no guids",
)
locations: tuple[str, ...] = tuple() locations: tuple[str, ...] = tuple()
full_path: str = ""
if generate_locations: if generate_locations:
if item.get("Path"): if item.get("Path"):
locations = tuple([item["Path"].split("/")[-1]]) full_path = item["Path"]
locations = tuple([filename_from_any_path(full_path)])
elif item.get("MediaSources"): elif item.get("MediaSources"):
locations = tuple( full_paths = [x["Path"] for x in item["MediaSources"] if x.get("Path")]
[ locations = tuple([filename_from_any_path(x) for x in full_paths])
x["Path"].split("/")[-1] full_path = " ".join(full_paths)
for x in item["MediaSources"]
if x.get("Path") if generate_guids:
] if not guids:
logger.debug(
f"{server_type}: {title if title else id} has no guids{f', locations: {full_path}' if full_path else ''}",
) )
if generate_locations:
if not locations: if not locations:
logger.info(f"{server_type}: {title if title else id} has no locations") logger.debug(
f"{server_type}: {title if title else id} has no locations{f', guids: {guids}' if guids else ''}",
)
return MediaIdentifiers( return MediaIdentifiers(
title=title, title=title,
@ -72,14 +74,27 @@ def extract_identifiers_from_item(
) )
def get_mediaitem(server_type: str, item: dict[str, Any]) -> MediaItem: def get_mediaitem(
server_type: str,
item: dict[str, Any],
generate_guids: bool,
generate_locations: bool,
) -> MediaItem:
user_data = item.get("UserData", {})
last_played_date = user_data.get("LastPlayedDate")
viewed_date = datetime.today()
if last_played_date:
viewed_date = datetime.fromisoformat(last_played_date.replace("Z", "+00:00"))
return MediaItem( return MediaItem(
identifiers=extract_identifiers_from_item(server_type, item), identifiers=extract_identifiers_from_item(
server_type, item, generate_guids, generate_locations
),
status=WatchedStatus( status=WatchedStatus(
completed=item.get("UserData", {}).get("Played"), completed=user_data.get("Played"),
time=floor( time=floor(user_data.get("PlaybackPositionTicks", 0) / 10000),
item.get("UserData", {}).get("PlaybackPositionTicks", 0) / 10000 viewed_date=viewed_date,
),
), ),
) )
@ -87,18 +102,21 @@ def get_mediaitem(server_type: str, item: dict[str, Any]) -> MediaItem:
class JellyfinEmby: class JellyfinEmby:
def __init__( def __init__(
self, self,
env,
server_type: Literal["Jellyfin", "Emby"], server_type: Literal["Jellyfin", "Emby"],
base_url: str, base_url: str,
token: str, token: str,
headers: dict[str, str], headers: dict[str, str],
) -> None: ) -> None:
self.env = env
if server_type not in ["Jellyfin", "Emby"]: if server_type not in ["Jellyfin", "Emby"]:
raise Exception(f"Server type {server_type} not supported") raise Exception(f"Server type {server_type} not supported")
self.server_type: str = server_type self.server_type: str = server_type
self.base_url: str = base_url self.base_url: str = base_url
self.token: str = token self.token: str = token
self.headers: dict[str, str] = headers self.headers: dict[str, str] = headers
self.timeout: int = int(os.getenv("REQUEST_TIMEOUT", 300)) self.timeout: int = int(get_env_value(self.env, "REQUEST_TIMEOUT", 300))
if not self.base_url: if not self.base_url:
raise Exception(f"{self.server_type} base_url not set") raise Exception(f"{self.server_type} base_url not set")
@ -113,6 +131,12 @@ class JellyfinEmby:
self.update_partial: bool = self.is_partial_update_supported( self.update_partial: bool = self.is_partial_update_supported(
self.server_version self.server_version
) )
self.generate_guids: bool = str_to_bool(
get_env_value(self.env, "GENERATE_GUIDS", "True")
)
self.generate_locations: bool = str_to_bool(
get_env_value(self.env, "GENERATE_LOCATIONS", "True")
)
def query( def query(
self, self,
@ -297,7 +321,7 @@ class JellyfinEmby:
movie_items = [] movie_items = []
watched_items = self.query( watched_items = self.query(
f"/Users/{user_id}/Items" f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources", + f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources,UserDataLastPlayedDate",
"get", "get",
) )
@ -306,7 +330,7 @@ class JellyfinEmby:
in_progress_items = self.query( in_progress_items = self.query(
f"/Users/{user_id}/Items" f"/Users/{user_id}/Items"
+ f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources", + f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources,UserDataLastPlayedDate",
"get", "get",
) )
@ -327,7 +351,14 @@ class JellyfinEmby:
movie["UserData"].get("Played") movie["UserData"].get("Played")
or movie["UserData"].get("PlaybackPositionTicks", 0) > 600000000 or movie["UserData"].get("PlaybackPositionTicks", 0) > 600000000
): ):
watched.movies.append(get_mediaitem(self.server_type, movie)) watched.movies.append(
get_mediaitem(
self.server_type,
movie,
self.generate_guids,
self.generate_locations,
)
)
# TV Shows # TV Shows
if library_type == "tvshows": if library_type == "tvshows":
@ -350,8 +381,24 @@ class JellyfinEmby:
if not show.get("UserData"): if not show.get("UserData"):
continue continue
if show["UserData"].get("PlayedPercentage", 0) > 0: played_percentage = show["UserData"].get("PlayedPercentage")
watched_shows_filtered.append(show) if played_percentage is None:
# Emby no longer shows PlayedPercentage
total_episodes = show.get("RecursiveItemCount")
unplayed_episodes = show["UserData"].get("UnplayedItemCount")
if total_episodes is None:
# Failed to get total count of episodes
continue
if (
unplayed_episodes is not None
and unplayed_episodes < total_episodes
):
watched_shows_filtered.append(show)
else:
if played_percentage > 0:
watched_shows_filtered.append(show)
# Retrieve the watched/partially watched list of episodes of each watched show # Retrieve the watched/partially watched list of episodes of each watched show
for show in watched_shows_filtered: for show in watched_shows_filtered:
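The hunk above adds a fallback for Emby servers that no longer report PlayedPercentage, comparing unplayed against total episode counts instead. Restated as a standalone predicate (a sketch, not code taken verbatim from the diff):

```python
from typing import Any

def show_has_watch_history(show: dict[str, Any]) -> bool:
    """Mirror of the filtering logic above: keep a show if any episode has been played."""
    user_data = show.get("UserData", {})
    played_percentage = user_data.get("PlayedPercentage")
    if played_percentage is None:
        # Emby no longer exposes PlayedPercentage, so fall back to episode counts.
        total_episodes = show.get("RecursiveItemCount")
        unplayed_episodes = user_data.get("UnplayedItemCount")
        if total_episodes is None:
            return False  # cannot tell; the real code skips the show
        return unplayed_episodes is not None and unplayed_episodes < total_episodes
    return played_percentage > 0

print(show_has_watch_history({"UserData": {"PlayedPercentage": 40.0}}))                           # True
print(show_has_watch_history({"RecursiveItemCount": 10, "UserData": {"UnplayedItemCount": 7}}))   # True
print(show_has_watch_history({"RecursiveItemCount": 10, "UserData": {"UnplayedItemCount": 10}}))  # False
```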
@ -360,14 +407,14 @@ class JellyfinEmby:
k.lower(): v for k, v in show.get("ProviderIds", {}).items() k.lower(): v for k, v in show.get("ProviderIds", {}).items()
} }
show_locations = ( show_locations = (
tuple([show["Path"].split("/")[-1]]) tuple([filename_from_any_path(show["Path"])])
if show.get("Path") if show.get("Path")
else tuple() else tuple()
) )
show_episodes = self.query( show_episodes = self.query(
f"/Shows/{show.get('Id')}/Episodes" f"/Shows/{show.get('Id')}/Episodes"
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources", + f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources,UserDataLastPlayedDate",
"get", "get",
) )
@ -394,7 +441,12 @@ class JellyfinEmby:
> 600000000 > 600000000
): ):
episode_mediaitem.append( episode_mediaitem.append(
get_mediaitem(self.server_type, episode) get_mediaitem(
self.server_type,
episode,
self.generate_guids,
self.generate_locations,
)
) )
if episode_mediaitem: if episode_mediaitem:
@ -425,12 +477,19 @@ class JellyfinEmby:
return LibraryData(title=library_title) return LibraryData(title=library_title)
def get_watched( def get_watched(
self, users: dict[str, str], sync_libraries: list[str] self,
users: dict[str, str],
sync_libraries: list[str],
users_watched: dict[str, UserData] = None,
) -> dict[str, UserData]: ) -> dict[str, UserData]:
try: try:
users_watched: dict[str, UserData] = {} if not users_watched:
users_watched: dict[str, UserData] = {}
for user_name, user_id in users.items(): for user_name, user_id in users.items():
if user_name.lower() not in users_watched:
users_watched[user_name.lower()] = UserData()
all_libraries = self.query(f"/Users/{user_id}/Views", "get") all_libraries = self.query(f"/Users/{user_id}/Views", "get")
if not all_libraries or not isinstance(all_libraries, dict): if not all_libraries or not isinstance(all_libraries, dict):
logger.debug( logger.debug(
@ -439,16 +498,24 @@ class JellyfinEmby:
continue continue
for library in all_libraries.get("Items", []): for library in all_libraries.get("Items", []):
if library.get("Name") not in sync_libraries:
continue
library_id = library.get("Id") library_id = library.get("Id")
library_title = library.get("Name") library_title = library.get("Name")
library_type = library.get("CollectionType") library_type = library.get("CollectionType")
if not library_id or not library_title or not library_type: if not library_id or not library_title or not library_type:
logger.debug( logger.debug(
f"{self.server_type}: Failed to get library data for {user_name} {library_title}" f"{self.server_type}: Failed to get library data for {user_name} {library_title}"
) )
continue
if library_title not in sync_libraries:
continue
if library_title in users_watched:
logger.info(
f"{self.server_type}: {user_name} {library_title} watched history has already been gathered, skipping"
)
continue
# Get watched for user # Get watched for user
library_data = self.get_user_library_watched( library_data = self.get_user_library_watched(
@ -506,7 +573,10 @@ class JellyfinEmby:
for jellyfin_video in jellyfin_search.get("Items", []): for jellyfin_video in jellyfin_search.get("Items", []):
jelly_identifiers = extract_identifiers_from_item( jelly_identifiers = extract_identifiers_from_item(
self.server_type, jellyfin_video self.server_type,
jellyfin_video,
self.generate_guids,
self.generate_locations,
) )
# Check each stored movie for a match. # Check each stored movie for a match.
for stored_movie in library_data.movies: for stored_movie in library_data.movies:
@ -514,12 +584,28 @@ class JellyfinEmby:
jelly_identifiers, stored_movie.identifiers jelly_identifiers, stored_movie.identifiers
): ):
jellyfin_video_id = jellyfin_video.get("Id") jellyfin_video_id = jellyfin_video.get("Id")
viewed_date: str = (
stored_movie.status.viewed_date.isoformat(
timespec="milliseconds"
).replace("+00:00", "Z")
)
if stored_movie.status.completed: if stored_movie.status.completed:
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library_name}" msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library_name}"
if not dryrun: if not dryrun:
user_data_payload: dict[
str, float | bool | datetime
] = {
"PlayCount": 1,
"Played": True,
"PlaybackPositionTicks": 0,
"LastPlayedDate": viewed_date,
}
self.query( self.query(
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}", f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
"post", "post",
json=user_data_payload,
) )
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}") logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
@ -529,19 +615,27 @@ class JellyfinEmby:
user_name, user_name,
library_name, library_name,
jellyfin_video.get("Name"), jellyfin_video.get("Name"),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
elif self.update_partial: elif self.update_partial:
msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}" msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}"
if not dryrun: if not dryrun:
playback_position_payload: dict[str, float] = { user_data_payload: dict[
str, float | bool | datetime
] = {
"PlayCount": 0,
"Played": False,
"PlaybackPositionTicks": stored_movie.status.time "PlaybackPositionTicks": stored_movie.status.time
* 10_000, * 10_000,
"LastPlayedDate": viewed_date,
} }
self.query( self.query(
f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData", f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
"post", "post",
json=playback_position_payload, json=user_data_payload,
) )
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}") logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
@ -552,6 +646,9 @@ class JellyfinEmby:
library_name, library_name,
jellyfin_video.get("Name"), jellyfin_video.get("Name"),
duration=floor(stored_movie.status.time / 60_000), duration=floor(stored_movie.status.time / 60_000),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
else: else:
logger.trace( logger.trace(
@ -576,7 +673,10 @@ class JellyfinEmby:
for jellyfin_show in jellyfin_shows: for jellyfin_show in jellyfin_shows:
jellyfin_show_identifiers = extract_identifiers_from_item( jellyfin_show_identifiers = extract_identifiers_from_item(
self.server_type, jellyfin_show self.server_type,
jellyfin_show,
self.generate_guids,
self.generate_locations,
) )
# Try to find a matching series in your stored library. # Try to find a matching series in your stored library.
for stored_series in library_data.series: for stored_series in library_data.series:
@ -606,7 +706,10 @@ class JellyfinEmby:
for jellyfin_episode in jellyfin_episodes.get("Items", []): for jellyfin_episode in jellyfin_episodes.get("Items", []):
jellyfin_episode_identifiers = ( jellyfin_episode_identifiers = (
extract_identifiers_from_item( extract_identifiers_from_item(
self.server_type, jellyfin_episode self.server_type,
jellyfin_episode,
self.generate_guids,
self.generate_locations,
) )
) )
for stored_ep in stored_series.episodes: for stored_ep in stored_series.episodes:
@ -615,15 +718,31 @@ class JellyfinEmby:
stored_ep.identifiers, stored_ep.identifiers,
): ):
jellyfin_episode_id = jellyfin_episode.get("Id") jellyfin_episode_id = jellyfin_episode.get("Id")
viewed_date: str = (
stored_ep.status.viewed_date.isoformat(
timespec="milliseconds"
).replace("+00:00", "Z")
)
if stored_ep.status.completed: if stored_ep.status.completed:
msg = ( msg = (
f"{self.server_type}: {jellyfin_episode.get('SeriesName')} {jellyfin_episode.get('SeasonName')} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}" f"{self.server_type}: {jellyfin_episode.get('SeriesName')} {jellyfin_episode.get('SeasonName')} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
+ f" as watched for {user_name} in {library_name}" + f" as watched for {user_name} in {library_name}"
) )
if not dryrun: if not dryrun:
user_data_payload: dict[
str, float | bool | datetime
] = {
"PlayCount": 1,
"Played": True,
"PlaybackPositionTicks": 0,
"LastPlayedDate": viewed_date,
}
self.query( self.query(
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}", f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
"post", "post",
json=user_data_payload,
) )
logger.success( logger.success(
@ -636,6 +755,9 @@ class JellyfinEmby:
library_name, library_name,
jellyfin_episode.get("SeriesName"), jellyfin_episode.get("SeriesName"),
jellyfin_episode.get("Name"), jellyfin_episode.get("Name"),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
elif self.update_partial: elif self.update_partial:
msg = ( msg = (
@ -644,14 +766,19 @@ class JellyfinEmby:
) )
if not dryrun: if not dryrun:
playback_position_payload = { user_data_payload: dict[
str, float | bool | datetime
] = {
"PlayCount": 0,
"Played": False,
"PlaybackPositionTicks": stored_ep.status.time "PlaybackPositionTicks": stored_ep.status.time
* 10_000, * 10_000,
"LastPlayedDate": viewed_date,
} }
self.query( self.query(
f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData", f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
"post", "post",
json=playback_position_payload, json=user_data_payload,
) )
logger.success( logger.success(
@ -667,6 +794,9 @@ class JellyfinEmby:
duration=floor( duration=floor(
stored_ep.status.time / 60_000 stored_ep.status.time / 60_000
), ),
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
) )
else: else:
logger.trace( logger.trace(
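Both the movie and episode paths above now POST a full UserData payload (play count, played flag, position ticks, LastPlayedDate) to /Users/{user_id}/Items/{item_id}/UserData instead of the old PlayedItems endpoint. A standalone sketch of that request; the base URL, token, ids, and the X-Emby-Token header are placeholders and assumptions, not values taken from this diff:

```python
from datetime import datetime, timezone
import requests

BASE_URL = "http://jellyfin.local:8096"      # placeholder
TOKEN = "api-token"                          # placeholder
user_id, item_id = "user-guid", "item-guid"  # placeholders

# Same timestamp formatting as the diff: ISO-8601 with milliseconds and a trailing "Z".
viewed_date = (
    datetime.now(timezone.utc)
    .isoformat(timespec="milliseconds")
    .replace("+00:00", "Z")
)

payload = {
    "PlayCount": 1,
    "Played": True,
    "PlaybackPositionTicks": 0,   # 0 when fully watched; partial = milliseconds * 10_000 ticks
    "LastPlayedDate": viewed_date,
}

resp = requests.post(
    f"{BASE_URL}/Users/{user_id}/Items/{item_id}/UserData",
    headers={"X-Emby-Token": TOKEN},  # assumption: the class builds its own auth headers
    json=payload,
    timeout=30,
)
resp.raise_for_status()
```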

main.py

@ -2,7 +2,7 @@ import os
import traceback import traceback
import json import json
import sys import sys
from dotenv import load_dotenv from dotenv import dotenv_values
from time import sleep, perf_counter from time import sleep, perf_counter
from loguru import logger from loguru import logger
@ -13,61 +13,69 @@ from src.library import setup_libraries
from src.functions import ( from src.functions import (
parse_string_to_list, parse_string_to_list,
str_to_bool, str_to_bool,
get_env_value,
) )
from src.users import setup_users from src.users import setup_users
from src.watched import ( from src.watched import (
cleanup_watched, cleanup_watched,
merge_server_watched,
) )
from src.black_white import setup_black_white_lists from src.black_white import setup_black_white_lists
from src.connection import generate_server_connections from src.connection import generate_server_connections
load_dotenv(override=True)
log_file = os.getenv("LOG_FILE", os.getenv("LOGFILE", "log.log")) def configure_logger(log_file: str = "log.log", debug_level: str = "INFO") -> None:
level = os.getenv("DEBUG_LEVEL", "INFO").upper()
def configure_logger() -> None:
# Remove default logger to configure our own # Remove default logger to configure our own
logger.remove() logger.remove()
# Choose log level based on environment # Choose log level based on environment
# If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO. # If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO.
if level not in ["INFO", "DEBUG", "TRACE"]: if debug_level not in ["INFO", "DEBUG", "TRACE"]:
logger.add(sys.stdout) logger.add(sys.stdout)
raise Exception("Invalid DEBUG_LEVEL, please choose between INFO, DEBUG, TRACE") raise Exception(
f"Invalid DEBUG_LEVEL {debug_level}, please choose between INFO, DEBUG, TRACE"
)
# Add a sink for file logging and the console. # Add a sink for file logging and the console.
logger.add(log_file, level=level, mode="w") logger.add(log_file, level=debug_level, mode="w")
logger.add(sys.stdout, level=level) logger.add(sys.stdout, level=debug_level)
def should_sync_server( def should_sync_server(
env,
server_1: Plex | Jellyfin | Emby, server_1: Plex | Jellyfin | Emby,
server_2: Plex | Jellyfin | Emby, server_2: Plex | Jellyfin | Emby,
) -> bool: ) -> bool:
sync_from_plex_to_jellyfin = str_to_bool( sync_from_plex_to_jellyfin = str_to_bool(
os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True") get_env_value(env, "SYNC_FROM_PLEX_TO_JELLYFIN", "True")
)
sync_from_plex_to_plex = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_PLEX", "True")
)
sync_from_plex_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_PLEX_TO_EMBY", "True")
) )
sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
sync_from_plex_to_emby = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_EMBY", "True"))
sync_from_jelly_to_plex = str_to_bool( sync_from_jelly_to_plex = str_to_bool(
os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True") get_env_value(env, "SYNC_FROM_JELLYFIN_TO_PLEX", "True")
) )
sync_from_jelly_to_jellyfin = str_to_bool( sync_from_jelly_to_jellyfin = str_to_bool(
os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True") get_env_value(env, "SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
) )
sync_from_jelly_to_emby = str_to_bool( sync_from_jelly_to_emby = str_to_bool(
os.getenv("SYNC_FROM_JELLYFIN_TO_EMBY", "True") get_env_value(env, "SYNC_FROM_JELLYFIN_TO_EMBY", "True")
) )
sync_from_emby_to_plex = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_PLEX", "True")) sync_from_emby_to_plex = str_to_bool(
sync_from_emby_to_jellyfin = str_to_bool( get_env_value(env, "SYNC_FROM_EMBY_TO_PLEX", "True")
os.getenv("SYNC_FROM_EMBY_TO_JELLYFIN", "True") )
sync_from_emby_to_jellyfin = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_JELLYFIN", "True")
)
sync_from_emby_to_emby = str_to_bool(
get_env_value(env, "SYNC_FROM_EMBY_TO_EMBY", "True")
) )
sync_from_emby_to_emby = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_EMBY", "True"))
if isinstance(server_1, Plex): if isinstance(server_1, Plex):
if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin: if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin:
@ -111,17 +119,17 @@ def should_sync_server(
return True return True
def main_loop() -> None: def main_loop(env) -> None:
dryrun = str_to_bool(os.getenv("DRYRUN", "False")) dryrun = str_to_bool(get_env_value(env, "DRYRUN", "False"))
logger.info(f"Dryrun: {dryrun}") logger.info(f"Dryrun: {dryrun}")
user_mapping_env = os.getenv("USER_MAPPING", None) user_mapping_env = get_env_value(env, "USER_MAPPING", None)
user_mapping = None user_mapping = None
if user_mapping_env: if user_mapping_env:
user_mapping = json.loads(user_mapping_env.lower()) user_mapping = json.loads(user_mapping_env.lower())
logger.info(f"User Mapping: {user_mapping}") logger.info(f"User Mapping: {user_mapping}")
library_mapping_env = os.getenv("LIBRARY_MAPPING", None) library_mapping_env = get_env_value(env, "LIBRARY_MAPPING", None)
library_mapping = None library_mapping = None
if library_mapping_env: if library_mapping_env:
library_mapping = json.loads(library_mapping_env) library_mapping = json.loads(library_mapping_env)
@ -129,16 +137,20 @@ def main_loop() -> None:
# Create (black/white)lists # Create (black/white)lists
logger.info("Creating (black/white)lists") logger.info("Creating (black/white)lists")
blacklist_library = parse_string_to_list(os.getenv("BLACKLIST_LIBRARY", None)) blacklist_library = parse_string_to_list(
whitelist_library = parse_string_to_list(os.getenv("WHITELIST_LIBRARY", None)) get_env_value(env, "BLACKLIST_LIBRARY", None)
)
whitelist_library = parse_string_to_list(
get_env_value(env, "WHITELIST_LIBRARY", None)
)
blacklist_library_type = parse_string_to_list( blacklist_library_type = parse_string_to_list(
os.getenv("BLACKLIST_LIBRARY_TYPE", None) get_env_value(env, "BLACKLIST_LIBRARY_TYPE", None)
) )
whitelist_library_type = parse_string_to_list( whitelist_library_type = parse_string_to_list(
os.getenv("WHITELIST_LIBRARY_TYPE", None) get_env_value(env, "WHITELIST_LIBRARY_TYPE", None)
) )
blacklist_users = parse_string_to_list(os.getenv("BLACKLIST_USERS", None)) blacklist_users = parse_string_to_list(get_env_value(env, "BLACKLIST_USERS", None))
whitelist_users = parse_string_to_list(os.getenv("WHITELIST_USERS", None)) whitelist_users = parse_string_to_list(get_env_value(env, "WHITELIST_USERS", None))
( (
blacklist_library, blacklist_library,
@ -160,19 +172,22 @@ def main_loop() -> None:
# Create server connections # Create server connections
logger.info("Creating server connections") logger.info("Creating server connections")
servers = generate_server_connections() servers = generate_server_connections(env)
for server_1 in servers: for server_1 in servers:
# If server is the final server in the list, then we are done with the loop # If server is the final server in the list, then we are done with the loop
if server_1 == servers[-1]: if server_1 == servers[-1]:
break break
# Store a copy of server_1_watched that way it can be used multiple times without having to regather everyones watch history every single time
server_1_watched = None
# Start server_2 at the next server in the list # Start server_2 at the next server in the list
for server_2 in servers[servers.index(server_1) + 1 :]: for server_2 in servers[servers.index(server_1) + 1 :]:
# Check if server 1 and server 2 are going to be synced in either direction, skip if not # Check if server 1 and server 2 are going to be synced in either direction, skip if not
if not should_sync_server(server_1, server_2) and not should_sync_server( if not should_sync_server(
server_2, server_1 env, server_1, server_2
): ) and not should_sync_server(env, server_2, server_1):
continue continue
logger.info(f"Server 1: {type(server_1)}: {server_1.info()}") logger.info(f"Server 1: {type(server_1)}: {server_1.info()}")
@ -197,7 +212,9 @@ def main_loop() -> None:
logger.info(f"Server 2 syncing libraries: {server_2_libraries}") logger.info(f"Server 2 syncing libraries: {server_2_libraries}")
logger.info("Creating watched lists", 1) logger.info("Creating watched lists", 1)
server_1_watched = server_1.get_watched(server_1_users, server_1_libraries) server_1_watched = server_1.get_watched(
server_1_users, server_1_libraries, server_1_watched
)
logger.info("Finished creating watched list server 1") logger.info("Finished creating watched list server 1")
server_2_watched = server_2.get_watched(server_2_users, server_2_libraries) server_2_watched = server_2.get_watched(server_2_users, server_2_libraries)
@@ -223,8 +240,18 @@ def main_loop() -> None:
                f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
            )
-            if should_sync_server(server_2, server_1):
+            if should_sync_server(env, server_2, server_1):
                logger.info(f"Syncing {server_2.info()} -> {server_1.info()}")
+                # Add server_2_watched_filtered to server_1_watched that way the stored version isn't stale for the next server
+                if not dryrun:
+                    server_1_watched = merge_server_watched(
+                        server_1_watched,
+                        server_2_watched_filtered,
+                        user_mapping,
+                        library_mapping,
+                    )
                server_1.update_watched(
                    server_2_watched_filtered,
                    user_mapping,
@@ -232,7 +259,7 @@ def main_loop() -> None:
                    dryrun,
                )
-            if should_sync_server(server_1, server_2):
+            if should_sync_server(env, server_1, server_2):
                logger.info(f"Syncing {server_1.info()} -> {server_2.info()}")
                server_2.update_watched(
                    server_1_watched_filtered,
@@ -244,15 +271,24 @@
@logger.catch
def main() -> None:
-    run_only_once = str_to_bool(os.getenv("RUN_ONLY_ONCE", "False"))
-    sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
+    # Get environment variables
+    env_file = get_env_value(None, "ENV_FILE", ".env")
+    env = dotenv_values(env_file)
+    run_only_once = str_to_bool(get_env_value(env, "RUN_ONLY_ONCE", "False"))
+    sleep_duration = float(get_env_value(env, "SLEEP_DURATION", "3600"))
+    log_file = get_env_value(env, "LOG_FILE", "log.log")
+    debug_level = get_env_value(env, "DEBUG_LEVEL", "INFO")
+    if debug_level:
+        debug_level = debug_level.upper()
    times: list[float] = []
    while True:
        try:
            start = perf_counter()
            # Reconfigure the logger on each loop so the logs are rotated on each run
-            configure_logger()
-            main_loop()
+            configure_logger(log_file, debug_level)
+            main_loop(env)
            end = perf_counter()
            times.append(end - start)
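
The new main() builds a single env mapping from the file named by ENV_FILE (via dotenv_values) and threads it through main_loop() and the server classes instead of reading os.environ at import time. The get_env_value helper lives in src.functions and is not shown in this diff; a minimal sketch of how such a helper could behave (an assumption for illustration, not the project's actual code):

    import os

    def get_env_value(env: dict | None, key: str, default=None):
        # Hypothetical sketch: prefer the parsed .env mapping, then fall back
        # to the real process environment, then to the caller-supplied default.
        if env and env.get(key) is not None:
            return env[key]
        return os.environ.get(key, default)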

View File

@@ -1,6 +1,5 @@
-import os
+from datetime import datetime, timezone
import requests
-from dotenv import load_dotenv
from loguru import logger
from urllib3.poolmanager import PoolManager
@@ -14,9 +13,11 @@ from plexapi.myplex import MyPlexAccount, MyPlexUser
from plexapi.library import MovieSection, ShowSection
from src.functions import (
+    filename_from_any_path,
    search_mapping,
    log_marked,
    str_to_bool,
+    get_env_value,
)
from src.watched import (
    LibraryData,
@@ -28,11 +29,6 @@ from src.watched import (
    check_same_identifiers,
)
-load_dotenv(override=True)
-generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
-generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
class HostNameIgnoringAdapter(RequestsHTTPAdapter):
@@ -48,7 +44,9 @@ class HostNameIgnoringAdapter(RequestsHTTPAdapter):
    )

-def extract_guids_from_item(item: Movie | Show | Episode) -> dict[str, str]:
+def extract_guids_from_item(
+    item: Movie | Show | Episode, generate_guids: bool
+) -> dict[str, str]:
    # If GENERATE_GUIDS is set to False, then return an empty dict
    if not generate_guids:
        return {}
@@ -62,181 +60,66 @@ def extract_guids_from_item(item: Movie | Show | Episode) -> dict[str, str]:
    return guids

-def extract_identifiers_from_item(item: Movie | Show | Episode) -> MediaIdentifiers:
-    guids = extract_guids_from_item(item)
+def extract_identifiers_from_item(
+    item: Movie | Show | Episode,
+    generate_guids: bool,
+    generate_locations: bool,
+) -> MediaIdentifiers:
+    guids = extract_guids_from_item(item, generate_guids)
+    locations = (
+        tuple([filename_from_any_path(loc) for loc in item.locations])
+        if generate_locations
+        else tuple()
+    )
+    if generate_guids:
+        if not guids:
+            logger.debug(
+                f"Plex: {item.title} has no guids{f', locations: {" ".join(item.locations)}' if generate_locations else ''}",
+            )
+    if generate_locations:
+        if not locations:
+            logger.debug(
+                f"Plex: {item.title} has no locations{f', guids: {guids}' if generate_guids else ''}",
+            )
    return MediaIdentifiers(
        title=item.title,
-        locations=(
-            tuple([location.split("/")[-1] for location in item.locations])
-            if generate_locations
-            else tuple()
-        ),
+        locations=locations,
        imdb_id=guids.get("imdb"),
        tvdb_id=guids.get("tvdb"),
        tmdb_id=guids.get("tmdb"),
    )

-def get_mediaitem(item: Movie | Episode, completed: bool) -> MediaItem:
+def get_mediaitem(
+    item: Movie | Episode,
+    completed: bool,
+    generate_guids: bool = True,
+    generate_locations: bool = True,
+) -> MediaItem:
+    last_viewed_at = item.lastViewedAt
+    viewed_date = datetime.today()
+    if last_viewed_at:
+        viewed_date = last_viewed_at.replace(tzinfo=timezone.utc)
    return MediaItem(
-        identifiers=extract_identifiers_from_item(item),
-        status=WatchedStatus(completed=completed, time=item.viewOffset),
+        identifiers=extract_identifiers_from_item(
+            item, generate_guids, generate_locations
+        ),
+        status=WatchedStatus(
+            completed=completed, time=item.viewOffset, viewed_date=viewed_date
+        ),
    )
def update_user_watched(
user: MyPlexAccount,
user_plex: PlexServer,
library_data: LibraryData,
library_name: str,
dryrun: bool,
) -> None:
# If there are no movies or shows to update, exit early.
if not library_data.series and not library_data.movies:
return
logger.info(f"Plex: Updating watched for {user.title} in library {library_name}")
library_section = user_plex.library.section(library_name)
if not library_section:
logger.error(
f"Plex: Library {library_name} not found for {user.title}, skipping",
)
return
# Update movies.
if library_data.movies:
# Search for Plex movies that are currently marked as unwatched.
for plex_movie in library_section.search(unwatched=True):
plex_identifiers = extract_identifiers_from_item(plex_movie)
# Check each stored movie for a match.
for stored_movie in library_data.movies:
if check_same_identifiers(plex_identifiers, stored_movie.identifiers):
# If the stored movie is marked as watched (or has enough progress),
# update the Plex movie accordingly.
if stored_movie.status.completed:
msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_movie.title} as watched, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
None,
None,
)
else:
msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.updateTimeline(stored_movie.status.time)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_movie.title} timeline, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
duration=stored_movie.status.time,
)
# Once matched, no need to check further.
break
# Update TV Shows (series/episodes).
if library_data.series:
# For each Plex show in the library section:
plex_shows = library_section.search(unwatched=True)
for plex_show in plex_shows:
# Extract identifiers from the Plex show.
plex_show_identifiers = extract_identifiers_from_item(plex_show)
# Try to find a matching series in your stored library.
for stored_series in library_data.series:
if check_same_identifiers(
plex_show_identifiers, stored_series.identifiers
):
logger.trace(f"Found matching show for '{plex_show.title}'")
# Now update episodes.
# Get the list of Plex episodes for this show.
plex_episodes = plex_show.episodes()
for plex_episode in plex_episodes:
plex_episode_identifiers = extract_identifiers_from_item(
plex_episode
)
for stored_ep in stored_series.episodes:
if check_same_identifiers(
plex_episode_identifiers, stored_ep.identifiers
):
if stored_ep.status.completed:
msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_show.title} {plex_episode.title} as watched, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
)
else:
msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.updateTimeline(
stored_ep.status.time
)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_show.title} {plex_episode.title} timeline, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
stored_ep.status.time,
)
break # Found a matching episode.
break # Found a matching show.
# class plex accept base url and token and username and password but default with none # class plex accept base url and token and username and password but default with none
class Plex: class Plex:
def __init__( def __init__(
self, self,
env,
base_url: str | None = None, base_url: str | None = None,
token: str | None = None, token: str | None = None,
user_name: str | None = None, user_name: str | None = None,
@ -245,6 +128,8 @@ class Plex:
ssl_bypass: bool = False, ssl_bypass: bool = False,
session: requests.Session | None = None, session: requests.Session | None = None,
) -> None: ) -> None:
self.env = env
self.server_type: str = "Plex" self.server_type: str = "Plex"
self.ssl_bypass: bool = ssl_bypass self.ssl_bypass: bool = ssl_bypass
if ssl_bypass: if ssl_bypass:
@ -261,6 +146,12 @@ class Plex:
self.admin_user: MyPlexAccount = self.plex.myPlexAccount() self.admin_user: MyPlexAccount = self.plex.myPlexAccount()
self.users: list[MyPlexUser | MyPlexAccount] = self.get_users() self.users: list[MyPlexUser | MyPlexAccount] = self.get_users()
self.generate_guids: bool = str_to_bool(
get_env_value(self.env, "GENERATE_GUIDS", "True")
)
self.generate_locations: bool = str_to_bool(
get_env_value(self.env, "GENERATE_LOCATIONS", "True")
)
def login( def login(
self, self,
@@ -346,7 +237,14 @@ class Plex:
                        unwatched=False
                    ) + library_videos.search(inProgress=True):
                        if video.isWatched or video.viewOffset >= 60000:
-                            watched.movies.append(get_mediaitem(video, video.isWatched))
+                            watched.movies.append(
+                                get_mediaitem(
+                                    video,
+                                    video.isWatched,
+                                    self.generate_guids,
+                                    self.generate_locations,
+                                )
+                            )
                elif library.type == "show":
                    # Keep track of processed shows to reduce duplicate shows
@@ -357,7 +255,7 @@ class Plex:
                        if show.key in processed_shows:
                            continue
                        processed_shows.append(show.key)
-                        show_guids = extract_guids_from_item(show)
+                        show_guids = extract_guids_from_item(show, self.generate_guids)
                        episode_mediaitem = []
                        # Fetch watched or partially watched episodes
@@ -365,7 +263,12 @@ class Plex:
                            viewOffset__gte=60_000
                        ):
                            episode_mediaitem.append(
-                                get_mediaitem(episode, episode.isWatched)
+                                get_mediaitem(
+                                    episode,
+                                    episode.isWatched,
+                                    self.generate_guids,
+                                    self.generate_locations,
+                                )
                            )
if episode_mediaitem: if episode_mediaitem:
@@ -376,11 +279,11 @@ class Plex:
                                locations=(
                                    tuple(
                                        [
-                                            location.split("/")[-1]
+                                            filename_from_any_path(location)
                                            for location in show.locations
                                        ]
                                    )
-                                    if generate_locations
+                                    if self.generate_locations
                                    else tuple()
                                ),
                                imdb_id=show_guids.get("imdb"),
@@ -400,10 +303,14 @@ class Plex:
            return LibraryData(title=library.title)

    def get_watched(
-        self, users: list[MyPlexUser | MyPlexAccount], sync_libraries: list[str]
+        self,
+        users: list[MyPlexUser | MyPlexAccount],
+        sync_libraries: list[str],
+        users_watched: dict[str, UserData] = None,
    ) -> dict[str, UserData]:
        try:
-            users_watched: dict[str, UserData] = {}
+            if not users_watched:
+                users_watched: dict[str, UserData] = {}
            for user in users:
                if self.admin_user == user:
@@ -428,20 +335,196 @@ class Plex:
                    if library.title not in sync_libraries:
                        continue
+                    if user_name not in users_watched:
+                        users_watched[user_name] = UserData()
+                    if library.title in users_watched[user_name].libraries:
+                        logger.info(
+                            f"Plex: {user_name} {library.title} watched history has already been gathered, skipping"
+                        )
+                        continue
                    library_data = self.get_user_library_watched(
                        user_name, user_plex, library
                    )
-                    if user_name not in users_watched:
-                        users_watched[user_name] = UserData()
                    users_watched[user_name].libraries[library.title] = library_data
            return users_watched
        except Exception as e:
-            logger.error(f"Plex: Failed to get watched, Error: {e}")
+            logger.error(f"Plex: Failed to get users watched, Error: {e}")
            return {}
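
get_watched() can now be seeded with a previously gathered users_watched dict, and any library already present is skipped instead of fetched again. A rough sketch of the calling pattern (variable names follow main_loop() above; the loop itself is illustrative only):

    server_1_watched = None
    for server_2 in servers[servers.index(server_1) + 1 :]:
        # Reuse whatever was gathered on an earlier pass; get_watched only
        # fills in users/libraries it has not seen yet.
        server_1_watched = server_1.get_watched(
            server_1_users, server_1_libraries, server_1_watched
        )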
def update_user_watched(
self,
user: MyPlexAccount,
user_plex: PlexServer,
library_data: LibraryData,
library_name: str,
dryrun: bool,
) -> None:
# If there are no movies or shows to update, exit early.
if not library_data.series and not library_data.movies:
return
logger.info(
f"Plex: Updating watched for {user.title} in library {library_name}"
)
library_section = user_plex.library.section(library_name)
if not library_section:
logger.error(
f"Plex: Library {library_name} not found for {user.title}, skipping",
)
return
# Update movies.
if library_data.movies:
# Search for Plex movies that are currently marked as unwatched.
for plex_movie in library_section.search(unwatched=True):
plex_identifiers = extract_identifiers_from_item(
plex_movie, self.generate_guids, self.generate_locations
)
# Check each stored movie for a match.
for stored_movie in library_data.movies:
if check_same_identifiers(
plex_identifiers, stored_movie.identifiers
):
# If the stored movie is marked as watched (or has enough progress),
# update the Plex movie accordingly.
if stored_movie.status.completed:
msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_movie.title} as watched, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
None,
None,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
else:
msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_movie.updateTimeline(stored_movie.status.time)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_movie.title} timeline, Error: {e}"
)
continue
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_movie.title,
duration=stored_movie.status.time,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
# Once matched, no need to check further.
break
# Update TV Shows (series/episodes).
if library_data.series:
# For each Plex show in the library section:
plex_shows = library_section.search(unwatched=True)
for plex_show in plex_shows:
# Extract identifiers from the Plex show.
plex_show_identifiers = extract_identifiers_from_item(
plex_show, self.generate_guids, self.generate_locations
)
# Try to find a matching series in your stored library.
for stored_series in library_data.series:
if check_same_identifiers(
plex_show_identifiers, stored_series.identifiers
):
logger.trace(f"Found matching show for '{plex_show.title}'")
# Now update episodes.
# Get the list of Plex episodes for this show.
plex_episodes = plex_show.episodes()
for plex_episode in plex_episodes:
plex_episode_identifiers = extract_identifiers_from_item(
plex_episode,
self.generate_guids,
self.generate_locations,
)
for stored_ep in stored_series.episodes:
if check_same_identifiers(
plex_episode_identifiers, stored_ep.identifiers
):
if stored_ep.status.completed:
msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.markWatched()
except Exception as e:
logger.error(
f"Plex: Failed to mark {plex_show.title} {plex_episode.title} as watched, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
else:
msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
if not dryrun:
try:
plex_episode.updateTimeline(
stored_ep.status.time
)
except Exception as e:
logger.error(
f"Plex: Failed to update {plex_show.title} {plex_episode.title} timeline, Error: {e}"
)
continue
logger.success(
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
)
log_marked(
"Plex",
user_plex.friendlyName,
user.title,
library_name,
plex_show.title,
plex_episode.title,
stored_ep.status.time,
mark_file=get_env_value(
self.env, "MARK_FILE", "mark.log"
),
)
break # Found a matching episode.
break # Found a matching show.
def update_watched( def update_watched(
self, self,
watched_list: dict[str, UserData], watched_list: dict[str, UserData],
@ -525,7 +608,7 @@ class Plex:
continue continue
try: try:
update_user_watched( self.update_user_watched(
user, user,
user_plex, user_plex,
library_data, library_data,
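
Throughout this file, media locations are now reduced to bare file names with filename_from_any_path() before being compared across servers. That helper comes from src.functions and is not part of this diff; one plausible minimal implementation (an assumption) built on pathlib, which is what makes Windows-style paths compare correctly:

    from pathlib import PureWindowsPath

    def filename_from_any_path(path: str) -> str:
        # Hypothetical sketch: PureWindowsPath accepts both "/" and "\" as
        # separators, so .name yields the final path component for either
        # style, unlike the old location.split("/")[-1].
        return PureWindowsPath(path).name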

View File

@ -1,4 +1,5 @@
import copy import copy
from datetime import datetime
from pydantic import BaseModel, Field from pydantic import BaseModel, Field
from loguru import logger from loguru import logger
from typing import Any from typing import Any
@ -21,6 +22,7 @@ class MediaIdentifiers(BaseModel):
class WatchedStatus(BaseModel): class WatchedStatus(BaseModel):
completed: bool completed: bool
time: int time: int
viewed_date: datetime
class MediaItem(BaseModel): class MediaItem(BaseModel):
@ -43,6 +45,111 @@ class UserData(BaseModel):
libraries: dict[str, LibraryData] = Field(default_factory=dict) libraries: dict[str, LibraryData] = Field(default_factory=dict)
def merge_mediaitem_data(ep1: MediaItem, ep2: MediaItem) -> MediaItem:
"""
Merge two MediaItem episodes by comparing their watched status.
If one is completed while the other isn't, choose the completed one.
If both are completed or both are not, choose the one with the higher time.
"""
if ep1.status.completed != ep2.status.completed:
return ep1 if ep1.status.completed else ep2
return ep1 if ep1.status.time >= ep2.status.time else ep2
def merge_series_data(series1: Series, series2: Series) -> Series:
"""
Merge two Series objects by combining their episodes.
For duplicate episodes (determined by check_same_identifiers), merge their watched status.
"""
merged_series = copy.deepcopy(series1)
for ep in series2.episodes:
for idx, merged_ep in enumerate(merged_series.episodes):
if check_same_identifiers(ep.identifiers, merged_ep.identifiers):
merged_series.episodes[idx] = merge_mediaitem_data(merged_ep, ep)
break
else:
merged_series.episodes.append(copy.deepcopy(ep))
return merged_series
def merge_library_data(lib1: LibraryData, lib2: LibraryData) -> LibraryData:
"""
Merge two LibraryData objects by extending movies and merging series.
For series, duplicates are determined using check_same_identifiers.
"""
merged = copy.deepcopy(lib1)
# Merge movies.
for movie in lib2.movies:
for idx, merged_movie in enumerate(merged.movies):
if check_same_identifiers(movie.identifiers, merged_movie.identifiers):
merged.movies[idx] = merge_mediaitem_data(merged_movie, movie)
break
else:
merged.movies.append(copy.deepcopy(movie))
# Merge series.
for series2 in lib2.series:
for idx, series1 in enumerate(merged.series):
if check_same_identifiers(series1.identifiers, series2.identifiers):
merged.series[idx] = merge_series_data(series1, series2)
break
else:
merged.series.append(copy.deepcopy(series2))
return merged
def merge_user_data(user1: UserData, user2: UserData) -> UserData:
"""
Merge two UserData objects by merging their libraries.
If a library exists in both, merge its content; otherwise, add the new library.
"""
merged_libraries = copy.deepcopy(user1.libraries)
for lib_key, lib_data in user2.libraries.items():
if lib_key in merged_libraries:
merged_libraries[lib_key] = merge_library_data(
merged_libraries[lib_key], lib_data
)
else:
merged_libraries[lib_key] = copy.deepcopy(lib_data)
return UserData(libraries=merged_libraries)
def merge_server_watched(
watched_list_1: dict[str, UserData],
watched_list_2: dict[str, UserData],
user_mapping: dict[str, str] | None = None,
library_mapping: dict[str, str] | None = None,
) -> dict[str, UserData]:
"""
Merge two dictionaries of UserData while taking into account possible
differences in user and library keys via the provided mappings.
"""
merged_watched = copy.deepcopy(watched_list_1)
for user_2, user_data in watched_list_2.items():
# Determine matching user key.
user_key = user_mapping.get(user_2, user_2) if user_mapping else user_2
if user_key not in merged_watched:
merged_watched[user_key] = copy.deepcopy(user_data)
continue
for lib_key, lib_data in user_data.libraries.items():
mapped_lib_key = (
library_mapping.get(lib_key, lib_key) if library_mapping else lib_key
)
if mapped_lib_key not in merged_watched[user_key].libraries:
merged_watched[user_key].libraries[mapped_lib_key] = copy.deepcopy(lib_data)
else:
merged_watched[user_key].libraries[mapped_lib_key] = merge_library_data(
merged_watched[user_key].libraries[mapped_lib_key],
lib_data,
)
return merged_watched
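
A small illustrative example of the merge rules above (it assumes the models defined in this file; the identifiers and times are made up):

    from datetime import datetime

    from src.watched import (
        MediaIdentifiers,
        MediaItem,
        WatchedStatus,
        merge_mediaitem_data,
    )

    now = datetime.today()
    ids = MediaIdentifiers(
        title="Example Episode",
        locations=("example.mkv",),
        imdb_id=None,
        tvdb_id=None,
        tmdb_id=None,
    )
    partial = MediaItem(
        identifiers=ids,
        status=WatchedStatus(completed=False, time=240000, viewed_date=now),
    )
    done = MediaItem(
        identifiers=ids,
        status=WatchedStatus(completed=True, time=0, viewed_date=now),
    )

    # A completed entry wins over a partial one; when both sides agree on
    # completion, the entry with the larger time wins.
    assert merge_mediaitem_data(partial, done) is done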
def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool: def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool:
# Check for duplicate based on file locations: # Check for duplicate based on file locations:
if item1.locations and item2.locations: if item1.locations and item2.locations:

View File

@ -1,3 +1,4 @@
from datetime import datetime
import sys import sys
import os import os
@ -23,6 +24,8 @@ from src.watched import (
cleanup_watched, cleanup_watched,
) )
viewed_date = datetime.today()
tv_shows_watched_list_1: list[Series] = [ tv_shows_watched_list_1: list[Series] = [
Series( Series(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -41,7 +44,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="968589", tmdb_id="968589",
tvdb_id="295296", tvdb_id="295296",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -51,7 +54,9 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="968590", tmdb_id="968590",
tvdb_id="295297", tvdb_id="295297",
), ),
status=WatchedStatus(completed=False, time=240000), status=WatchedStatus(
completed=False, time=240000, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -61,7 +66,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="968592", tmdb_id="968592",
tvdb_id="295298", tvdb_id="295298",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
], ],
), ),
@ -82,7 +87,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4661246", tmdb_id="4661246",
tvdb_id="10009418", tvdb_id="10009418",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -92,7 +97,9 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4712059", tmdb_id="4712059",
tvdb_id="10009419", tvdb_id="10009419",
), ),
status=WatchedStatus(completed=False, time=240000), status=WatchedStatus(
completed=False, time=240000, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -102,7 +109,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4712061", tmdb_id="4712061",
tvdb_id="10009420", tvdb_id="10009420",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
], ],
), ),
@ -123,7 +130,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="3070048", tmdb_id="3070048",
tvdb_id="8438181", tvdb_id="8438181",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -133,7 +140,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4568681", tmdb_id="4568681",
tvdb_id="9829910", tvdb_id="9829910",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -143,7 +150,7 @@ tv_shows_watched_list_1: list[Series] = [
tmdb_id="4497012", tmdb_id="4497012",
tvdb_id="9870382", tvdb_id="9870382",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
], ],
), ),
@ -170,7 +177,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="295294", tvdb_id="295294",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -180,7 +187,9 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="295295", tvdb_id="295295",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=False, time=300670), status=WatchedStatus(
completed=False, time=300670, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -190,7 +199,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="295298", tvdb_id="295298",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
], ],
), ),
@ -211,7 +220,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="9959300", tvdb_id="9959300",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -221,7 +230,9 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="10009417", tvdb_id="10009417",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=False, time=300741), status=WatchedStatus(
completed=False, time=300741, viewed_date=viewed_date
),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -231,7 +242,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="10009420", tvdb_id="10009420",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
], ],
), ),
@ -252,7 +263,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="8438181", tvdb_id="8438181",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -262,7 +273,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="9829910", tvdb_id="9829910",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -272,7 +283,7 @@ tv_shows_watched_list_2: list[Series] = [
tvdb_id="9870382", tvdb_id="9870382",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
], ],
), ),
@ -299,7 +310,7 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="968589", tmdb_id="968589",
tvdb_id="295296", tvdb_id="295296",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -309,7 +320,9 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="968590", tmdb_id="968590",
tvdb_id="295297", tvdb_id="295297",
), ),
status=WatchedStatus(completed=False, time=240000), status=WatchedStatus(
completed=False, time=240000, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -330,7 +343,7 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="4661246", tmdb_id="4661246",
tvdb_id="10009418", tvdb_id="10009418",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -340,7 +353,9 @@ expected_tv_show_watched_list_1: list[Series] = [
tmdb_id="4712059", tmdb_id="4712059",
tvdb_id="10009419", tvdb_id="10009419",
), ),
status=WatchedStatus(completed=False, time=240000), status=WatchedStatus(
completed=False, time=240000, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -367,7 +382,7 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="295294", tvdb_id="295294",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -377,7 +392,9 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="295295", tvdb_id="295295",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=False, time=300670), status=WatchedStatus(
completed=False, time=300670, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -398,7 +415,7 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="9959300", tvdb_id="9959300",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -408,7 +425,9 @@ expected_tv_show_watched_list_2: list[Series] = [
tvdb_id="10009417", tvdb_id="10009417",
tmdb_id=None, tmdb_id=None,
), ),
status=WatchedStatus(completed=False, time=300741), status=WatchedStatus(
completed=False, time=300741, viewed_date=viewed_date
),
), ),
], ],
), ),
@ -426,7 +445,7 @@ movies_watched_list_1: list[MediaItem] = [
tmdb_id="10378", tmdb_id="10378",
tvdb_id="12352", tvdb_id="12352",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -436,7 +455,7 @@ movies_watched_list_1: list[MediaItem] = [
tmdb_id="1029575", tmdb_id="1029575",
tvdb_id="351194", tvdb_id="351194",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -446,7 +465,7 @@ movies_watched_list_1: list[MediaItem] = [
tmdb_id="466420", tmdb_id="466420",
tvdb_id="135852", tvdb_id="135852",
), ),
status=WatchedStatus(completed=False, time=240000), status=WatchedStatus(completed=False, time=240000, viewed_date=viewed_date),
), ),
] ]
@ -462,7 +481,7 @@ movies_watched_list_2: list[MediaItem] = [
tmdb_id="1029575", tmdb_id="1029575",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -472,7 +491,7 @@ movies_watched_list_2: list[MediaItem] = [
tmdb_id="507089", tmdb_id="507089",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -482,7 +501,7 @@ movies_watched_list_2: list[MediaItem] = [
tmdb_id="695721", tmdb_id="695721",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=False, time=301215), status=WatchedStatus(completed=False, time=301215, viewed_date=viewed_date),
), ),
] ]
@ -498,7 +517,7 @@ expected_movie_watched_list_1: list[MediaItem] = [
tmdb_id="10378", tmdb_id="10378",
tvdb_id="12352", tvdb_id="12352",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -508,7 +527,7 @@ expected_movie_watched_list_1: list[MediaItem] = [
tmdb_id="466420", tmdb_id="466420",
tvdb_id="135852", tvdb_id="135852",
), ),
status=WatchedStatus(completed=False, time=240000), status=WatchedStatus(completed=False, time=240000, viewed_date=viewed_date),
), ),
] ]
@ -524,7 +543,7 @@ expected_movie_watched_list_2: list[MediaItem] = [
tmdb_id="507089", tmdb_id="507089",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
), ),
MediaItem( MediaItem(
identifiers=MediaIdentifiers( identifiers=MediaIdentifiers(
@ -534,7 +553,7 @@ expected_movie_watched_list_2: list[MediaItem] = [
tmdb_id="695721", tmdb_id="695721",
tvdb_id=None, tvdb_id=None,
), ),
status=WatchedStatus(completed=False, time=301215), status=WatchedStatus(completed=False, time=301215, viewed_date=viewed_date),
), ),
] ]
@ -562,7 +581,7 @@ tv_shows_2_watched_list_1: list[Series] = [
tmdb_id="282843", tmdb_id="282843",
tvdb_id="176357", tvdb_id="176357",
), ),
status=WatchedStatus(completed=True, time=0), status=WatchedStatus(completed=True, time=0, viewed_date=viewed_date),
) )
], ],
) )
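
Every fixture status now carries the same module-level viewed_date. Since pydantic models compare field by field, expected and actual entries only test equal when their timestamps match, which is presumably why one shared value is reused throughout; a minimal illustration:

    from datetime import datetime

    from src.watched import WatchedStatus

    ts = datetime.today()
    # Field-by-field model equality: identical timestamps compare equal,
    # while two separate datetime.today() calls generally would not.
    assert WatchedStatus(completed=True, time=0, viewed_date=ts) == WatchedStatus(
        completed=True, time=0, viewed_date=ts
    )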

339
uv.lock
View File

@ -1,89 +1,97 @@
version = 1 version = 1
revision = 2
requires-python = ">=3.12" requires-python = ">=3.12"
[[package]] [[package]]
name = "annotated-types" name = "annotated-types"
version = "0.7.0" version = "0.7.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
] ]
[[package]] [[package]]
name = "certifi" name = "certifi"
version = "2025.4.26" version = "2025.8.3"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705 } sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618 }, { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" },
] ]
[[package]] [[package]]
name = "charset-normalizer" name = "charset-normalizer"
version = "3.4.2" version = "3.4.3"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367 } sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", size = 122371, upload-time = "2025-08-09T07:57:28.46Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936 }, { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 205655, upload-time = "2025-08-09T07:56:08.475Z" },
{ url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790 }, { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" },
{ url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924 }, { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" },
{ url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626 }, { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" },
{ url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567 }, { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" },
{ url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957 }, { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" },
{ url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408 }, { url = "https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" },
{ url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399 }, { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" },
{ url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815 }, { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" },
{ url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537 }, { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" },
{ url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565 }, { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" },
{ url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357 }, { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" },
{ url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776 }, { url = "https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" },
{ url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622 }, { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" },
{ url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435 }, { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" },
{ url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653 }, { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" },
{ url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231 }, { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" },
{ url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243 }, { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" },
{ url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442 }, { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" },
{ url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147 }, { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" },
{ url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057 }, { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" },
{ url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454 }, { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" },
{ url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174 }, { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" },
{ url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166 }, { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" },
{ url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064 }, { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" },
{ url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641 }, { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" },
{ url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626 }, { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" },
{ url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" },
{ url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" },
{ url = "https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" },
{ url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" },
{ url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" },
{ url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" },
{ url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" },
]
[[package]]
name = "colorama"
version = "0.4.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
]
[[package]]
name = "idna"
version = "3.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
]
[[package]]
name = "iniconfig"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]
[[package]]
name = "jellyplex-watched"
-version = "7.0.4"
+version = "8.3.0"
source = { virtual = "." }
dependencies = [
{ name = "loguru" },
@@ -108,19 +116,19 @@ lint = [
requires-dist = [
{ name = "loguru", specifier = ">=0.7.3" },
{ name = "packaging", specifier = "==25.0" },
-{ name = "plexapi", specifier = "==4.17.0" },
+{ name = "plexapi", specifier = "==4.17.1" },
-{ name = "pydantic", specifier = "==2.11.4" },
+{ name = "pydantic", specifier = "==2.11.7" },
-{ name = "python-dotenv", specifier = "==1.1.0" },
+{ name = "python-dotenv", specifier = "==1.1.1" },
-{ name = "requests", specifier = "==2.32.3" },
+{ name = "requests", specifier = "==2.32.5" },
]
[package.metadata.requires-dev]
dev = [
-{ name = "mypy", specifier = ">=1.15.0" },
+{ name = "mypy", specifier = ">=1.16.1" },
-{ name = "pytest", specifier = ">=8.3.5" },
+{ name = "pytest", specifier = ">=8.4.1" },
-{ name = "types-requests", specifier = ">=2.32.0.20250515" },
+{ name = "types-requests", specifier = ">=2.32.0.20250611" },
]
-lint = [{ name = "ruff", specifier = ">=0.11.10" }]
+lint = [{ name = "ruff", specifier = ">=0.12.3" }]
[[package]]
name = "loguru"
@@ -130,78 +138,94 @@ dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" }, { name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "win32-setctime", marker = "sys_platform == 'win32'" }, { name = "win32-setctime", marker = "sys_platform == 'win32'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559 } sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595 }, { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" },
] ]
[[package]] [[package]]
name = "mypy" name = "mypy"
version = "1.15.0" version = "1.18.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "mypy-extensions" }, { name = "mypy-extensions" },
{ name = "pathspec" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/ce/43/d5e49a86afa64bd3839ea0d5b9c7103487007d728e1293f52525d6d5486a/mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43", size = 3239717 } sdist = { url = "https://files.pythonhosted.org/packages/14/a3/931e09fc02d7ba96da65266884da4e4a8806adcdb8a57faaacc6edf1d538/mypy-1.18.1.tar.gz", hash = "sha256:9e988c64ad3ac5987f43f5154f884747faf62141b7f842e87465b45299eea5a9", size = 3448447, upload-time = "2025-09-11T23:00:47.067Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/98/3a/03c74331c5eb8bd025734e04c9840532226775c47a2c39b56a0c8d4f128d/mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd", size = 10793981 }, { url = "https://files.pythonhosted.org/packages/e7/14/1c3f54d606cb88a55d1567153ef3a8bc7b74702f2ff5eb64d0994f9e49cb/mypy-1.18.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:502cde8896be8e638588b90fdcb4c5d5b8c1b004dfc63fd5604a973547367bb9", size = 12911082, upload-time = "2025-09-11T23:00:41.465Z" },
{ url = "https://files.pythonhosted.org/packages/f0/1a/41759b18f2cfd568848a37c89030aeb03534411eef981df621d8fad08a1d/mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f", size = 9749175 }, { url = "https://files.pythonhosted.org/packages/90/83/235606c8b6d50a8eba99773add907ce1d41c068edb523f81eb0d01603a83/mypy-1.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7509549b5e41be279afc1228242d0e397f1af2919a8f2877ad542b199dc4083e", size = 11919107, upload-time = "2025-09-11T22:58:40.903Z" },
{ url = "https://files.pythonhosted.org/packages/12/7e/873481abf1ef112c582db832740f4c11b2bfa510e829d6da29b0ab8c3f9c/mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464", size = 11455675 }, { url = "https://files.pythonhosted.org/packages/ca/25/4e2ce00f8d15b99d0c68a2536ad63e9eac033f723439ef80290ec32c1ff5/mypy-1.18.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5956ecaabb3a245e3f34100172abca1507be687377fe20e24d6a7557e07080e2", size = 12472551, upload-time = "2025-09-11T22:58:37.272Z" },
{ url = "https://files.pythonhosted.org/packages/b3/d0/92ae4cde706923a2d3f2d6c39629134063ff64b9dedca9c1388363da072d/mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee", size = 12410020 }, { url = "https://files.pythonhosted.org/packages/32/bb/92642a9350fc339dd9dcefcf6862d171b52294af107d521dce075f32f298/mypy-1.18.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8750ceb014a96c9890421c83f0db53b0f3b8633e2864c6f9bc0a8e93951ed18d", size = 13340554, upload-time = "2025-09-11T22:59:38.756Z" },
{ url = "https://files.pythonhosted.org/packages/46/8b/df49974b337cce35f828ba6fda228152d6db45fed4c86ba56ffe442434fd/mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e", size = 12498582 }, { url = "https://files.pythonhosted.org/packages/cd/ee/38d01db91c198fb6350025d28f9719ecf3c8f2c55a0094bfbf3ef478cc9a/mypy-1.18.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fb89ea08ff41adf59476b235293679a6eb53a7b9400f6256272fb6029bec3ce5", size = 13530933, upload-time = "2025-09-11T22:59:20.228Z" },
{ url = "https://files.pythonhosted.org/packages/13/50/da5203fcf6c53044a0b699939f31075c45ae8a4cadf538a9069b165c1050/mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22", size = 9366614 }, { url = "https://files.pythonhosted.org/packages/da/8d/6d991ae631f80d58edbf9d7066e3f2a96e479dca955d9a968cd6e90850a3/mypy-1.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:2657654d82fcd2a87e02a33e0d23001789a554059bbf34702d623dafe353eabf", size = 9828426, upload-time = "2025-09-11T23:00:21.007Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9b/fd2e05d6ffff24d912f150b87db9e364fa8282045c875654ce7e32fffa66/mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445", size = 10788592 }, { url = "https://files.pythonhosted.org/packages/e4/ec/ef4a7260e1460a3071628a9277a7579e7da1b071bc134ebe909323f2fbc7/mypy-1.18.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d70d2b5baf9b9a20bc9c730015615ae3243ef47fb4a58ad7b31c3e0a59b5ef1f", size = 12918671, upload-time = "2025-09-11T22:58:29.814Z" },
{ url = "https://files.pythonhosted.org/packages/74/37/b246d711c28a03ead1fd906bbc7106659aed7c089d55fe40dd58db812628/mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d", size = 9753611 }, { url = "https://files.pythonhosted.org/packages/a1/82/0ea6c3953f16223f0b8eda40c1aeac6bd266d15f4902556ae6e91f6fca4c/mypy-1.18.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b8367e33506300f07a43012fc546402f283c3f8bcff1dc338636affb710154ce", size = 11913023, upload-time = "2025-09-11T23:00:29.049Z" },
{ url = "https://files.pythonhosted.org/packages/a6/ac/395808a92e10cfdac8003c3de9a2ab6dc7cde6c0d2a4df3df1b815ffd067/mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5", size = 11438443 }, { url = "https://files.pythonhosted.org/packages/ae/ef/5e2057e692c2690fc27b3ed0a4dbde4388330c32e2576a23f0302bc8358d/mypy-1.18.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:913f668ec50c3337b89df22f973c1c8f0b29ee9e290a8b7fe01cc1ef7446d42e", size = 12473355, upload-time = "2025-09-11T23:00:04.544Z" },
{ url = "https://files.pythonhosted.org/packages/d2/8b/801aa06445d2de3895f59e476f38f3f8d610ef5d6908245f07d002676cbf/mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036", size = 12402541 }, { url = "https://files.pythonhosted.org/packages/98/43/b7e429fc4be10e390a167b0cd1810d41cb4e4add4ae50bab96faff695a3b/mypy-1.18.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a0e70b87eb27b33209fa4792b051c6947976f6ab829daa83819df5f58330c71", size = 13346944, upload-time = "2025-09-11T22:58:23.024Z" },
{ url = "https://files.pythonhosted.org/packages/c7/67/5a4268782eb77344cc613a4cf23540928e41f018a9a1ec4c6882baf20ab8/mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357", size = 12494348 }, { url = "https://files.pythonhosted.org/packages/89/4e/899dba0bfe36bbd5b7c52e597de4cf47b5053d337b6d201a30e3798e77a6/mypy-1.18.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c378d946e8a60be6b6ede48c878d145546fb42aad61df998c056ec151bf6c746", size = 13512574, upload-time = "2025-09-11T22:59:52.152Z" },
{ url = "https://files.pythonhosted.org/packages/83/3e/57bb447f7bbbfaabf1712d96f9df142624a386d98fb026a761532526057e/mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf", size = 9373648 }, { url = "https://files.pythonhosted.org/packages/f5/f8/7661021a5b0e501b76440454d786b0f01bb05d5c4b125fcbda02023d0250/mypy-1.18.1-cp313-cp313-win_amd64.whl", hash = "sha256:2cd2c1e0f3a7465f22731987fff6fc427e3dcbb4ca5f7db5bbeaff2ff9a31f6d", size = 9837684, upload-time = "2025-09-11T22:58:44.454Z" },
{ url = "https://files.pythonhosted.org/packages/09/4e/a7d65c7322c510de2c409ff3828b03354a7c43f5a8ed458a7a131b41c7b9/mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e", size = 2221777 }, { url = "https://files.pythonhosted.org/packages/bf/87/7b173981466219eccc64c107cf8e5ab9eb39cc304b4c07df8e7881533e4f/mypy-1.18.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ba24603c58e34dd5b096dfad792d87b304fc6470cbb1c22fd64e7ebd17edcc61", size = 12900265, upload-time = "2025-09-11T22:59:03.4Z" },
{ url = "https://files.pythonhosted.org/packages/ae/cc/b10e65bae75b18a5ac8f81b1e8e5867677e418f0dd2c83b8e2de9ba96ebd/mypy-1.18.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ed36662fb92ae4cb3cacc682ec6656208f323bbc23d4b08d091eecfc0863d4b5", size = 11942890, upload-time = "2025-09-11T23:00:00.607Z" },
{ url = "https://files.pythonhosted.org/packages/39/d4/aeefa07c44d09f4c2102e525e2031bc066d12e5351f66b8a83719671004d/mypy-1.18.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:040ecc95e026f71a9ad7956fea2724466602b561e6a25c2e5584160d3833aaa8", size = 12472291, upload-time = "2025-09-11T22:59:43.425Z" },
{ url = "https://files.pythonhosted.org/packages/c6/07/711e78668ff8e365f8c19735594ea95938bff3639a4c46a905e3ed8ff2d6/mypy-1.18.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:937e3ed86cb731276706e46e03512547e43c391a13f363e08d0fee49a7c38a0d", size = 13318610, upload-time = "2025-09-11T23:00:17.604Z" },
{ url = "https://files.pythonhosted.org/packages/ca/85/df3b2d39339c31d360ce299b418c55e8194ef3205284739b64962f6074e7/mypy-1.18.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1f95cc4f01c0f1701ca3b0355792bccec13ecb2ec1c469e5b85a6ef398398b1d", size = 13513697, upload-time = "2025-09-11T22:58:59.534Z" },
{ url = "https://files.pythonhosted.org/packages/b1/df/462866163c99ea73bb28f0eb4d415c087e30de5d36ee0f5429d42e28689b/mypy-1.18.1-cp314-cp314-win_amd64.whl", hash = "sha256:e4f16c0019d48941220ac60b893615be2f63afedaba6a0801bdcd041b96991ce", size = 9985739, upload-time = "2025-09-11T22:58:51.644Z" },
{ url = "https://files.pythonhosted.org/packages/e0/1d/4b97d3089b48ef3d904c9ca69fab044475bd03245d878f5f0b3ea1daf7ce/mypy-1.18.1-py3-none-any.whl", hash = "sha256:b76a4de66a0ac01da1be14ecc8ae88ddea33b8380284a9e3eae39d57ebcbe26e", size = 2352212, upload-time = "2025-09-11T22:59:26.576Z" },
]
[[package]]
name = "mypy-extensions"
version = "1.1.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
]
[[package]]
name = "packaging"
version = "25.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]
[[package]]
name = "pathspec"
version = "0.12.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
]
[[package]]
name = "plexapi"
-version = "4.17.0"
+version = "4.17.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2a/02/1bebd67c3cd94a45f6c3520da971791b66457535c9771d8e0068746d7bc2/plexapi-4.17.1.tar.gz", hash = "sha256:1e5bfb486bb150e058a80ff4fb9aff9e3efce644c56d52bb5297272e005d8241", size = 154746, upload-time = "2025-08-26T00:11:02.819Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/1c/9fdaa0e1f797dde3c3cb56d7b222109009f70380e7f49fc0ff42d5705409/plexapi-4.17.1-py3-none-any.whl", hash = "sha256:9d51adb112a2b0b7aa91a928c8b5c0dfffc0d51108cea67d86fea08cee06c998", size = 166861, upload-time = "2025-08-26T00:11:00.89Z" },
]
[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
]
[[package]]
name = "pydantic"
-version = "2.11.4"
+version = "2.11.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -209,9 +233,9 @@ dependencies = [
{ name = "typing-extensions" }, { name = "typing-extensions" },
{ name = "typing-inspection" }, { name = "typing-inspection" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540 } sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900 }, { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" },
] ]
[[package]] [[package]]
@@ -221,68 +245,78 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
]
[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
] ]
[[package]]
name = "pytest"
-version = "8.3.5"
+version = "8.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
]
[[package]]
name = "python-dotenv"
-version = "1.1.0"
+version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
]
[[package]]
name = "requests"
-version = "2.32.3"
+version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -290,83 +324,84 @@ dependencies = [
{ name = "idna" }, { name = "idna" },
{ name = "urllib3" }, { name = "urllib3" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
] ]
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.11.10" version = "0.13.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/4c/4a3c5a97faaae6b428b336dcca81d03ad04779f8072c267ad2bd860126bf/ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6", size = 4165632 } sdist = { url = "https://files.pythonhosted.org/packages/6e/1a/1f4b722862840295bcaba8c9e5261572347509548faaa99b2d57ee7bfe6a/ruff-0.13.0.tar.gz", hash = "sha256:5b4b1ee7eb35afae128ab94459b13b2baaed282b1fb0f472a73c82c996c8ae60", size = 5372863, upload-time = "2025-09-10T16:25:37.917Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/2f/9f/596c628f8824a2ce4cd12b0f0b4c0629a62dfffc5d0f742c19a1d71be108/ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58", size = 10316243 }, { url = "https://files.pythonhosted.org/packages/ac/fe/6f87b419dbe166fd30a991390221f14c5b68946f389ea07913e1719741e0/ruff-0.13.0-py3-none-linux_armv6l.whl", hash = "sha256:137f3d65d58ee828ae136a12d1dc33d992773d8f7644bc6b82714570f31b2004", size = 12187826, upload-time = "2025-09-10T16:24:39.5Z" },
{ url = "https://files.pythonhosted.org/packages/3c/38/c1e0b77ab58b426f8c332c1d1d3432d9fc9a9ea622806e208220cb133c9e/ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed", size = 11083636 }, { url = "https://files.pythonhosted.org/packages/e4/25/c92296b1fc36d2499e12b74a3fdb230f77af7bdf048fad7b0a62e94ed56a/ruff-0.13.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:21ae48151b66e71fd111b7d79f9ad358814ed58c339631450c66a4be33cc28b9", size = 12933428, upload-time = "2025-09-10T16:24:43.866Z" },
{ url = "https://files.pythonhosted.org/packages/23/41/b75e15961d6047d7fe1b13886e56e8413be8467a4e1be0a07f3b303cd65a/ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca", size = 10441624 }, { url = "https://files.pythonhosted.org/packages/44/cf/40bc7221a949470307d9c35b4ef5810c294e6cfa3caafb57d882731a9f42/ruff-0.13.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:64de45f4ca5441209e41742d527944635a05a6e7c05798904f39c85bafa819e3", size = 12095543, upload-time = "2025-09-10T16:24:46.638Z" },
{ url = "https://files.pythonhosted.org/packages/b6/2c/e396b6703f131406db1811ea3d746f29d91b41bbd43ad572fea30da1435d/ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2", size = 10624358 }, { url = "https://files.pythonhosted.org/packages/f1/03/8b5ff2a211efb68c63a1d03d157e924997ada87d01bebffbd13a0f3fcdeb/ruff-0.13.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2c653ae9b9d46e0ef62fc6fbf5b979bda20a0b1d2b22f8f7eb0cde9f4963b8", size = 12312489, upload-time = "2025-09-10T16:24:49.556Z" },
{ url = "https://files.pythonhosted.org/packages/bd/8c/ee6cca8bdaf0f9a3704796022851a33cd37d1340bceaf4f6e991eb164e2e/ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5", size = 10176850 }, { url = "https://files.pythonhosted.org/packages/37/fc/2336ef6d5e9c8d8ea8305c5f91e767d795cd4fc171a6d97ef38a5302dadc/ruff-0.13.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4cec632534332062bc9eb5884a267b689085a1afea9801bf94e3ba7498a2d207", size = 11991631, upload-time = "2025-09-10T16:24:53.439Z" },
{ url = "https://files.pythonhosted.org/packages/e9/ce/4e27e131a434321b3b7c66512c3ee7505b446eb1c8a80777c023f7e876e6/ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641", size = 11759787 }, { url = "https://files.pythonhosted.org/packages/39/7f/f6d574d100fca83d32637d7f5541bea2f5e473c40020bbc7fc4a4d5b7294/ruff-0.13.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dcd628101d9f7d122e120ac7c17e0a0f468b19bc925501dbe03c1cb7f5415b24", size = 13720602, upload-time = "2025-09-10T16:24:56.392Z" },
{ url = "https://files.pythonhosted.org/packages/58/de/1e2e77fc72adc7cf5b5123fd04a59ed329651d3eab9825674a9e640b100b/ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947", size = 12430479 }, { url = "https://files.pythonhosted.org/packages/fd/c8/a8a5b81d8729b5d1f663348d11e2a9d65a7a9bd3c399763b1a51c72be1ce/ruff-0.13.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afe37db8e1466acb173bb2a39ca92df00570e0fd7c94c72d87b51b21bb63efea", size = 14697751, upload-time = "2025-09-10T16:24:59.89Z" },
{ url = "https://files.pythonhosted.org/packages/07/ed/af0f2340f33b70d50121628ef175523cc4c37619e98d98748c85764c8d88/ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4", size = 11919760 }, { url = "https://files.pythonhosted.org/packages/57/f5/183ec292272ce7ec5e882aea74937f7288e88ecb500198b832c24debc6d3/ruff-0.13.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f96a8d90bb258d7d3358b372905fe7333aaacf6c39e2408b9f8ba181f4b6ef2", size = 14095317, upload-time = "2025-09-10T16:25:03.025Z" },
{ url = "https://files.pythonhosted.org/packages/24/09/d7b3d3226d535cb89234390f418d10e00a157b6c4a06dfbe723e9322cb7d/ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f", size = 14041747 }, { url = "https://files.pythonhosted.org/packages/9f/8d/7f9771c971724701af7926c14dab31754e7b303d127b0d3f01116faef456/ruff-0.13.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b5e3d883e4f924c5298e3f2ee0f3085819c14f68d1e5b6715597681433f153", size = 13144418, upload-time = "2025-09-10T16:25:06.272Z" },
{ url = "https://files.pythonhosted.org/packages/62/b3/a63b4e91850e3f47f78795e6630ee9266cb6963de8f0191600289c2bb8f4/ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b", size = 11550657 }, { url = "https://files.pythonhosted.org/packages/a8/a6/7985ad1778e60922d4bef546688cd8a25822c58873e9ff30189cfe5dc4ab/ruff-0.13.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03447f3d18479df3d24917a92d768a89f873a7181a064858ea90a804a7538991", size = 13370843, upload-time = "2025-09-10T16:25:09.965Z" },
{ url = "https://files.pythonhosted.org/packages/46/63/a4f95c241d79402ccdbdb1d823d156c89fbb36ebfc4289dce092e6c0aa8f/ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2", size = 10489671 }, { url = "https://files.pythonhosted.org/packages/64/1c/bafdd5a7a05a50cc51d9f5711da704942d8dd62df3d8c70c311e98ce9f8a/ruff-0.13.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:fbc6b1934eb1c0033da427c805e27d164bb713f8e273a024a7e86176d7f462cf", size = 13321891, upload-time = "2025-09-10T16:25:12.969Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9b/c2238bfebf1e473495659c523d50b1685258b6345d5ab0b418ca3f010cd7/ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523", size = 10160135 }, { url = "https://files.pythonhosted.org/packages/bc/3e/7817f989cb9725ef7e8d2cee74186bf90555279e119de50c750c4b7a72fe/ruff-0.13.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8ab6a3e03665d39d4a25ee199d207a488724f022db0e1fe4002968abdb8001b", size = 12119119, upload-time = "2025-09-10T16:25:16.621Z" },
{ url = "https://files.pythonhosted.org/packages/ba/ef/ba7251dd15206688dbfba7d413c0312e94df3b31b08f5d695580b755a899/ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125", size = 11170179 }, { url = "https://files.pythonhosted.org/packages/58/07/9df080742e8d1080e60c426dce6e96a8faf9a371e2ce22eef662e3839c95/ruff-0.13.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d2a5c62f8ccc6dd2fe259917482de7275cecc86141ee10432727c4816235bc41", size = 11961594, upload-time = "2025-09-10T16:25:19.49Z" },
{ url = "https://files.pythonhosted.org/packages/73/9f/5c336717293203ba275dbfa2ea16e49b29a9fd9a0ea8b6febfc17e133577/ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad", size = 11626021 }, { url = "https://files.pythonhosted.org/packages/6a/f4/ae1185349197d26a2316840cb4d6c3fba61d4ac36ed728bf0228b222d71f/ruff-0.13.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b7b85ca27aeeb1ab421bc787009831cffe6048faae08ad80867edab9f2760945", size = 12933377, upload-time = "2025-09-10T16:25:22.371Z" },
{ url = "https://files.pythonhosted.org/packages/d9/2b/162fa86d2639076667c9aa59196c020dc6d7023ac8f342416c2f5ec4bda0/ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19", size = 10494958 }, { url = "https://files.pythonhosted.org/packages/b6/39/e776c10a3b349fc8209a905bfb327831d7516f6058339a613a8d2aaecacd/ruff-0.13.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:79ea0c44a3032af768cabfd9616e44c24303af49d633b43e3a5096e009ebe823", size = 13418555, upload-time = "2025-09-10T16:25:25.681Z" },
{ url = "https://files.pythonhosted.org/packages/24/f3/66643d8f32f50a4b0d09a4832b7d919145ee2b944d43e604fbd7c144d175/ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224", size = 11650285 }, { url = "https://files.pythonhosted.org/packages/46/09/dca8df3d48e8b3f4202bf20b1658898e74b6442ac835bfe2c1816d926697/ruff-0.13.0-py3-none-win32.whl", hash = "sha256:4e473e8f0e6a04e4113f2e1de12a5039579892329ecc49958424e5568ef4f768", size = 12141613, upload-time = "2025-09-10T16:25:28.664Z" },
{ url = "https://files.pythonhosted.org/packages/95/3a/2e8704d19f376c799748ff9cb041225c1d59f3e7711bc5596c8cfdc24925/ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1", size = 10765278 }, { url = "https://files.pythonhosted.org/packages/61/21/0647eb71ed99b888ad50e44d8ec65d7148babc0e242d531a499a0bbcda5f/ruff-0.13.0-py3-none-win_amd64.whl", hash = "sha256:48e5c25c7a3713eea9ce755995767f4dcd1b0b9599b638b12946e892123d1efb", size = 13258250, upload-time = "2025-09-10T16:25:31.773Z" },
{ url = "https://files.pythonhosted.org/packages/e1/a3/03216a6a86c706df54422612981fb0f9041dbb452c3401501d4a22b942c9/ruff-0.13.0-py3-none-win_arm64.whl", hash = "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e", size = 12312357, upload-time = "2025-09-10T16:25:35.595Z" },
] ]
[[package]] [[package]]
name = "types-requests" name = "types-requests"
version = "2.32.0.20250515" version = "2.32.4.20250809"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "urllib3" }, { name = "urllib3" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/06/c1/cdc4f9b8cfd9130fbe6276db574f114541f4231fcc6fb29648289e6e3390/types_requests-2.32.0.20250515.tar.gz", hash = "sha256:09c8b63c11318cb2460813871aaa48b671002e59fda67ca909e9883777787581", size = 23012 } sdist = { url = "https://files.pythonhosted.org/packages/ed/b0/9355adb86ec84d057fea765e4c49cce592aaf3d5117ce5609a95a7fc3dac/types_requests-2.32.4.20250809.tar.gz", hash = "sha256:d8060de1c8ee599311f56ff58010fb4902f462a1470802cf9f6ed27bc46c4df3", size = 23027, upload-time = "2025-08-09T03:17:10.664Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/0f/68a997c73a129287785f418c1ebb6004f81e46b53b3caba88c0e03fcd04a/types_requests-2.32.0.20250515-py3-none-any.whl", hash = "sha256:f8eba93b3a892beee32643ff836993f15a785816acca21ea0ffa006f05ef0fb2", size = 20635 }, { url = "https://files.pythonhosted.org/packages/2b/6f/ec0012be842b1d888d46884ac5558fd62aeae1f0ec4f7a581433d890d4b5/types_requests-2.32.4.20250809-py3-none-any.whl", hash = "sha256:f73d1832fb519ece02c85b1f09d5f0dd3108938e7d47e7f94bbfa18a6782b163", size = 20644, upload-time = "2025-08-09T03:17:09.716Z" },
] ]
[[package]] [[package]]
name = "typing-extensions" name = "typing-extensions"
version = "4.13.2" version = "4.15.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
] ]
[[package]] [[package]]
name = "typing-inspection" name = "typing-inspection"
version = "0.4.0" version = "0.4.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
] ]
[[package]] [[package]]
name = "urllib3" name = "urllib3"
version = "2.4.0" version = "2.5.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672 } sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680 }, { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
] ]
[[package]] [[package]]
name = "win32-setctime" name = "win32-setctime"
version = "1.2.0" version = "1.2.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867 } sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083 }, { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" },
] ]
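Each lock entry above pins an exact artifact by download URL, sha256 digest, byte size, and (in the newer lock format) upload time. As a rough illustration of what those fields let a consumer verify, the Python sketch below recomputes the digest and size of a locally downloaded wheel and compares them to a lockfile entry; the file path and helper name are hypothetical and not part of this repository.

import hashlib
from pathlib import Path

def matches_lock_entry(wheel_path: str, expected_hash: str, expected_size: int) -> bool:
    """Compare a downloaded wheel against a uv.lock entry's hash and size fields."""
    data = Path(wheel_path).read_bytes()  # raises FileNotFoundError if the wheel is absent
    algo, _, digest = expected_hash.partition(":")  # lock values look like "sha256:<hex digest>"
    return hashlib.new(algo, data).hexdigest() == digest and len(data) == expected_size

# Illustrative call using the ruff 0.13.0 win_arm64 entry listed above:
if matches_lock_entry(
    "ruff-0.13.0-py3-none-win_arm64.whl",
    "sha256:ab80525317b1e1d38614addec8ac954f1b3e662de9d59114ecbf771d00cf613e",
    12312357,
):
    print("wheel matches its lock entry")

In practice a version bump of the shape shown in this diff comes from re-resolving the lockfile with the package manager (for uv, something along the lines of uv lock --upgrade) rather than from editing these entries by hand.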