Compare commits
96 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
81ba9bd7f9 | ||
|
|
d15759570e | ||
|
|
1b88ecf2eb | ||
|
|
c62809c615 | ||
|
|
899a6b05a4 | ||
|
|
fcd6103e17 | ||
|
|
ac5be474f8 | ||
|
|
d15f29b772 | ||
|
|
c9944866f8 | ||
|
|
846e18fffe | ||
|
|
eb09de2bdf | ||
|
|
c0e207924c | ||
|
|
e48533dfbd | ||
|
|
8503b087b2 | ||
|
|
305fea8f9a | ||
|
|
588c23ce41 | ||
|
|
8f4a2e2690 | ||
|
|
38e65f5a17 | ||
|
|
de32d59aa1 | ||
|
|
998f2b1209 | ||
|
|
0b02f531c1 | ||
|
|
e589935b37 | ||
|
|
031d43e980 | ||
|
|
ba6cad13f6 | ||
|
|
f3801a0bd2 | ||
|
|
196a49fca4 | ||
|
|
4d0f1d303f | ||
|
|
ce5b810a5b | ||
|
|
a1e1ccde42 | ||
|
|
bf633c75d1 | ||
|
|
46fa5e7c9a | ||
|
|
170757aca1 | ||
|
|
9786e9e27d | ||
|
|
8b691b7bfa | ||
|
|
e1c65fc082 | ||
|
|
58749a4fb8 | ||
|
|
51ec69f651 | ||
|
|
4771f736b0 | ||
|
|
8d7436579e | ||
|
|
43e1df98b1 | ||
|
|
3017030f52 | ||
|
|
348a0b8226 | ||
|
|
4e60c08120 | ||
|
|
10b58379cd | ||
|
|
fa9201b20f | ||
|
|
86f72997b4 | ||
|
|
62d0319aad | ||
|
|
a096a09eb7 | ||
|
|
7294241fed | ||
|
|
a5995d3999 | ||
|
|
30f31b2f3f | ||
|
|
bc09c873e9 | ||
|
|
8428be9dda | ||
|
|
6a45ad18f9 | ||
|
|
023b638729 | ||
|
|
7e13c14636 | ||
|
|
0c218fa9dd | ||
|
|
b3b0ccac73 | ||
|
|
fa0134551f | ||
|
|
34d62c9021 | ||
|
|
920bbbb3be | ||
|
|
762e5f10da | ||
|
|
27797cb361 | ||
|
|
066f9d1f66 | ||
|
|
acf7c2cdf2 | ||
|
|
469857a31a | ||
|
|
405e5decf2 | ||
|
|
da9abf8a24 | ||
|
|
128c6a1c76 | ||
|
|
99f32c10ef | ||
|
|
44e42f99db | ||
|
|
b1639eab0f | ||
|
|
679d3535b1 | ||
|
|
a795d4bba5 | ||
|
|
0a025cf5fa | ||
|
|
6a1ceb4db3 | ||
|
|
99c339c405 | ||
|
|
bd75d865ba | ||
|
|
d30e03b702 | ||
|
|
3b749faefb | ||
|
|
74f29d44b3 | ||
|
|
a397ceb54e | ||
|
|
502b3616df | ||
|
|
1a7178e32d | ||
|
|
7119956ec7 | ||
|
|
24035e217e | ||
|
|
21ffce674f | ||
|
|
4185f5fc94 | ||
|
|
3fdcc99304 | ||
|
|
0fa2a698ac | ||
|
|
2b871c58ed | ||
|
|
5078243938 | ||
|
|
b67e6d7257 | ||
|
|
632dfbcadb | ||
|
|
1f7da2f609 | ||
|
|
4a4c9f9ccf |
@@ -1,3 +1,4 @@
|
|||||||
|
.venv
|
||||||
.dockerignore
|
.dockerignore
|
||||||
.env
|
.env
|
||||||
.env.sample
|
.env.sample
|
||||||
@@ -9,7 +10,4 @@
|
|||||||
|
|
||||||
Dockerfile*
|
Dockerfile*
|
||||||
README.md
|
README.md
|
||||||
|
|
||||||
test
|
test
|
||||||
|
|
||||||
venv
|
|
||||||
38
.env.sample
38
.env.sample
@@ -35,7 +35,7 @@ GENERATE_GUIDS = "True"
|
|||||||
GENERATE_LOCATIONS = "True"
|
GENERATE_LOCATIONS = "True"
|
||||||
|
|
||||||
## Max threads for processing
|
## Max threads for processing
|
||||||
MAX_THREADS = 32
|
MAX_THREADS = 2
|
||||||
|
|
||||||
## Map usernames between servers in the event that they are different, order does not matter
|
## Map usernames between servers in the event that they are different, order does not matter
|
||||||
## Comma separated for multiple options
|
## Comma separated for multiple options
|
||||||
@@ -55,7 +55,6 @@ MAX_THREADS = 32
|
|||||||
WHITELIST_USERS = "testuser1,testuser2"
|
WHITELIST_USERS = "testuser1,testuser2"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Plex
|
# Plex
|
||||||
|
|
||||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
@@ -77,13 +76,6 @@ PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
|||||||
## Set to True if running into ssl certificate errors
|
## Set to True if running into ssl certificate errors
|
||||||
SSL_BYPASS = "False"
|
SSL_BYPASS = "False"
|
||||||
|
|
||||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
|
||||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
|
||||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
|
||||||
|
|
||||||
|
|
||||||
# Jellyfin
|
# Jellyfin
|
||||||
|
|
||||||
@@ -94,3 +86,31 @@ JELLYFIN_BASEURL = "http://localhost:8096, http://nas:8096"
|
|||||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
## Comma separated list for multiple servers
|
## Comma separated list for multiple servers
|
||||||
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
||||||
|
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||||
15
.github/FUNDING.yml
vendored
Normal file
15
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
# These are supported funding model platforms
|
||||||
|
|
||||||
|
github: [Luigi311]
|
||||||
|
patreon: # Replace with a single Patreon username
|
||||||
|
open_collective: # Replace with a single Open Collective username
|
||||||
|
ko_fi: # Replace with a single Ko-fi username
|
||||||
|
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||||
|
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
|
||||||
|
liberapay: # Replace with a single Liberapay username
|
||||||
|
issuehunt: # Replace with a single IssueHunt username
|
||||||
|
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
|
||||||
|
polar: # Replace with a single Polar username
|
||||||
|
buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
|
||||||
|
thanks_dev: # Replace with a single thanks.dev username
|
||||||
|
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
|
||||||
112
.github/workflows/ci.yml
vendored
112
.github/workflows/ci.yml
vendored
@@ -10,26 +10,45 @@ on:
|
|||||||
- .gitignore
|
- .gitignore
|
||||||
- "*.md"
|
- "*.md"
|
||||||
|
|
||||||
|
env:
|
||||||
|
PYTHON_VERSION: '3.13'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
pytest:
|
pytest:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v5
|
||||||
|
|
||||||
|
- name: "Set up Python"
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version-file: ".python-version"
|
||||||
|
|
||||||
- name: "Install dependencies"
|
- name: "Install dependencies"
|
||||||
run: pip install -r requirements.txt && pip install -r test/requirements.txt
|
run: uv sync --all-extras --dev
|
||||||
|
|
||||||
- name: "Run tests"
|
- name: "Run tests"
|
||||||
run: pytest -vvv
|
run: uv run pytest -vvv
|
||||||
|
|
||||||
test:
|
test:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v5
|
||||||
|
|
||||||
|
- name: "Set up Python"
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version-file: ".python-version"
|
||||||
|
|
||||||
- name: "Install dependencies"
|
- name: "Install dependencies"
|
||||||
run: |
|
run: |
|
||||||
pip install -r requirements.txt
|
uv sync --all-extras --dev
|
||||||
sudo apt update && sudo apt install -y docker-compose
|
sudo apt update && sudo apt install -y docker-compose
|
||||||
|
|
||||||
- name: "Checkout JellyPlex-Watched-CI"
|
- name: "Checkout JellyPlex-Watched-CI"
|
||||||
@@ -40,43 +59,67 @@ jobs:
|
|||||||
|
|
||||||
- name: "Start containers"
|
- name: "Start containers"
|
||||||
run: |
|
run: |
|
||||||
export PGID=$(id -g)
|
JellyPlex-Watched-CI/start_containers.sh
|
||||||
export PUID=$(id -u)
|
|
||||||
|
|
||||||
sudo chown -R $PUID:$PGID JellyPlex-Watched-CI
|
|
||||||
|
|
||||||
docker pull lscr.io/linuxserver/plex &
|
|
||||||
docker pull lscr.io/linuxserver/jellyfin &
|
|
||||||
|
|
||||||
wait
|
|
||||||
|
|
||||||
docker-compose -f JellyPlex-Watched-CI/plex/docker-compose.yml up -d
|
|
||||||
docker-compose -f JellyPlex-Watched-CI/jellyfin/docker-compose.yml up -d
|
|
||||||
# Wait for containers to start
|
# Wait for containers to start
|
||||||
sleep 10
|
sleep 10
|
||||||
|
|
||||||
docker-compose -f JellyPlex-Watched-CI/plex/docker-compose.yml logs
|
for FOLDER in $(find "JellyPlex-Watched-CI" -type f -name "docker-compose.yml" -exec dirname {} \;); do
|
||||||
docker-compose -f JellyPlex-Watched-CI/jellyfin/docker-compose.yml logs
|
docker compose -f "${FOLDER}/docker-compose.yml" logs
|
||||||
|
done
|
||||||
|
|
||||||
- name: "Run tests"
|
- name: "Test Plex"
|
||||||
run: |
|
run: |
|
||||||
# Test ci1
|
mv test/ci_plex.env .env
|
||||||
mv test/ci1.env .env
|
uv run main.py
|
||||||
python main.py
|
uv run test/validate_ci_marklog.py --plex
|
||||||
|
|
||||||
|
rm mark.log
|
||||||
|
|
||||||
|
- name: "Test Jellyfin"
|
||||||
|
run: |
|
||||||
|
mv test/ci_jellyfin.env .env
|
||||||
|
uv run main.py
|
||||||
|
uv run test/validate_ci_marklog.py --jellyfin
|
||||||
|
|
||||||
|
rm mark.log
|
||||||
|
|
||||||
# Test ci2
|
- name: "Test Emby"
|
||||||
mv test/ci2.env .env
|
run: |
|
||||||
python main.py
|
mv test/ci_emby.env .env
|
||||||
|
uv run main.py
|
||||||
|
uv run test/validate_ci_marklog.py --emby
|
||||||
|
|
||||||
|
rm mark.log
|
||||||
|
|
||||||
# Test ci3
|
- name: "Test Guids"
|
||||||
mv test/ci3.env .env
|
run: |
|
||||||
python main.py
|
mv test/ci_guids.env .env
|
||||||
|
uv run main.py
|
||||||
|
uv run test/validate_ci_marklog.py --guids
|
||||||
|
|
||||||
|
rm mark.log
|
||||||
|
|
||||||
|
- name: "Test Locations"
|
||||||
|
run: |
|
||||||
|
mv test/ci_locations.env .env
|
||||||
|
uv run main.py
|
||||||
|
uv run test/validate_ci_marklog.py --locations
|
||||||
|
|
||||||
|
rm mark.log
|
||||||
|
|
||||||
|
- name: "Test writing to the servers"
|
||||||
|
run: |
|
||||||
|
# Test writing to the servers
|
||||||
|
mv test/ci_write.env .env
|
||||||
|
uv run main.py
|
||||||
|
|
||||||
# Test again to test if it can handle existing data
|
# Test again to test if it can handle existing data
|
||||||
python main.py
|
uv run main.py
|
||||||
|
|
||||||
cat mark.log
|
uv run test/validate_ci_marklog.py --write
|
||||||
python test/validate_ci_marklog.py
|
|
||||||
|
rm mark.log
|
||||||
|
|
||||||
docker:
|
docker:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
@@ -105,18 +148,23 @@ jobs:
|
|||||||
${{ secrets.DOCKER_USERNAME }}/jellyplex-watched,enable=${{ secrets.DOCKER_USERNAME != '' }}
|
${{ secrets.DOCKER_USERNAME }}/jellyplex-watched,enable=${{ secrets.DOCKER_USERNAME != '' }}
|
||||||
# Do not push to ghcr.io on PRs due to permission issues, only push if the owner is luigi311 so it doesnt fail on forks
|
# Do not push to ghcr.io on PRs due to permission issues, only push if the owner is luigi311 so it doesnt fail on forks
|
||||||
ghcr.io/${{ github.repository }},enable=${{ github.event_name != 'pull_request' && github.repository_owner == 'luigi311'}}
|
ghcr.io/${{ github.repository }},enable=${{ github.event_name != 'pull_request' && github.repository_owner == 'luigi311'}}
|
||||||
|
flavor: latest=false
|
||||||
tags: |
|
tags: |
|
||||||
type=raw,value=latest,enable=${{ matrix.variant == env.DEFAULT_VARIANT && github.ref_name == github.event.repository.default_branch }}
|
type=raw,value=latest,enable=${{ matrix.variant == env.DEFAULT_VARIANT && startsWith(github.ref, 'refs/tags/') }}
|
||||||
type=raw,value=dev,enable=${{ matrix.variant == env.DEFAULT_VARIANT && github.ref_name == 'dev' }}
|
type=raw,value=latest,suffix=-${{ matrix.variant }},enable=${{ startsWith(github.ref, 'refs/tags/') }}
|
||||||
type=raw,value=latest,suffix=-${{ matrix.variant }},enable={{ is_default_branch }}
|
|
||||||
type=ref,event=branch,suffix=-${{ matrix.variant }}
|
type=ref,event=branch,suffix=-${{ matrix.variant }}
|
||||||
type=ref,event=branch,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
type=ref,event=branch,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||||
|
|
||||||
type=ref,event=pr,suffix=-${{ matrix.variant }}
|
type=ref,event=pr,suffix=-${{ matrix.variant }}
|
||||||
type=ref,event=pr,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
type=ref,event=pr,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||||
|
|
||||||
type=semver,pattern={{ version }},suffix=-${{ matrix.variant }}
|
type=semver,pattern={{ version }},suffix=-${{ matrix.variant }}
|
||||||
type=semver,pattern={{ version }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
type=semver,pattern={{ version }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||||
|
|
||||||
type=semver,pattern={{ major }}.{{ minor }},suffix=-${{ matrix.variant }}
|
type=semver,pattern={{ major }}.{{ minor }},suffix=-${{ matrix.variant }}
|
||||||
type=semver,pattern={{ major }}.{{ minor }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
type=semver,pattern={{ major }}.{{ minor }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||||
|
|
||||||
type=sha,suffix=-${{ matrix.variant }}
|
type=sha,suffix=-${{ matrix.variant }}
|
||||||
type=sha,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
type=sha,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||||
|
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -84,9 +84,6 @@ target/
|
|||||||
profile_default/
|
profile_default/
|
||||||
ipython_config.py
|
ipython_config.py
|
||||||
|
|
||||||
# pyenv
|
|
||||||
.python-version
|
|
||||||
|
|
||||||
# pipenv
|
# pipenv
|
||||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||||
|
|||||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
3.13
|
||||||
4
.vscode/launch.json
vendored
4
.vscode/launch.json
vendored
@@ -6,7 +6,7 @@
|
|||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
{
|
||||||
"name": "Python: Main",
|
"name": "Python: Main",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"program": "main.py",
|
"program": "main.py",
|
||||||
"console": "integratedTerminal",
|
"console": "integratedTerminal",
|
||||||
@@ -14,7 +14,7 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Pytest",
|
"name": "Pytest",
|
||||||
"type": "python",
|
"type": "debugpy",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"module": "pytest",
|
"module": "pytest",
|
||||||
"args": [
|
"args": [
|
||||||
|
|||||||
@@ -1,53 +1,107 @@
|
|||||||
FROM python:3.11-alpine
|
FROM ghcr.io/astral-sh/uv:python3.13-alpine
|
||||||
|
|
||||||
ENV DRYRUN 'True'
|
ENV PUID=1000
|
||||||
ENV DEBUG 'True'
|
ENV PGID=1000
|
||||||
ENV DEBUG_LEVEL 'INFO'
|
ENV GOSU_VERSION=1.17
|
||||||
ENV RUN_ONLY_ONCE 'False'
|
|
||||||
ENV SLEEP_DURATION '3600'
|
|
||||||
ENV LOGFILE 'log.log'
|
|
||||||
ENV MARKFILE 'mark.log'
|
|
||||||
|
|
||||||
ENV USER_MAPPING ''
|
RUN apk add --no-cache tini dos2unix
|
||||||
ENV LIBRARY_MAPPING ''
|
|
||||||
|
|
||||||
ENV PLEX_BASEURL ''
|
# Install gosu
|
||||||
ENV PLEX_TOKEN ''
|
RUN set -eux; \
|
||||||
ENV PLEX_USERNAME ''
|
\
|
||||||
ENV PLEX_PASSWORD ''
|
apk add --no-cache --virtual .gosu-deps \
|
||||||
ENV PLEX_SERVERNAME ''
|
ca-certificates \
|
||||||
|
dpkg \
|
||||||
ENV JELLYFIN_BASEURL ''
|
gnupg \
|
||||||
ENV JELLYFIN_TOKEN ''
|
; \
|
||||||
|
\
|
||||||
ENV SYNC_FROM_PLEX_TO_JELLYFIN 'True'
|
dpkgArch="$(dpkg --print-architecture | awk -F- '{ print $NF }')"; \
|
||||||
ENV SYNC_FROM_JELLYFIN_TO_PLEX 'True'
|
wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch"; \
|
||||||
ENV SYNC_FROM_PLEX_TO_PLEX 'True'
|
wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch.asc"; \
|
||||||
ENV SYNC_FROM_JELLYFIN_TO_JELLYFIN 'True'
|
\
|
||||||
|
# verify the signature
|
||||||
ENV BLACKLIST_LIBRARY ''
|
export GNUPGHOME="$(mktemp -d)"; \
|
||||||
ENV WHITELIST_LIBRARY ''
|
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4; \
|
||||||
ENV BLACKLIST_LIBRARY_TYPE ''
|
gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \
|
||||||
ENV WHITELIST_LIBRARY_TYPE ''
|
gpgconf --kill all; \
|
||||||
ENV BLACKLIST_USERS ''
|
rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc; \
|
||||||
ENV WHITELIST_USERS ''
|
\
|
||||||
|
# clean up fetch dependencies
|
||||||
|
apk del --no-network .gosu-deps; \
|
||||||
RUN apk add --no-cache tini && \
|
\
|
||||||
addgroup --system jellyplex_user && \
|
chmod +x /usr/local/bin/gosu; \
|
||||||
adduser --system --no-create-home jellyplex_user --ingroup jellyplex_user && \
|
# verify that the binary works
|
||||||
mkdir -p /app && \
|
gosu --version; \
|
||||||
chown -R jellyplex_user:jellyplex_user /app
|
gosu nobody true
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY --chown=jellyplex_user:jellyplex_user ./requirements.txt ./
|
# Enable bytecode compilation
|
||||||
|
ENV UV_COMPILE_BYTECODE=1
|
||||||
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
ENV UV_LINK_MODE=copy
|
||||||
|
|
||||||
COPY --chown=jellyplex_user:jellyplex_user . .
|
# Set the cache directory to /tmp instead of root
|
||||||
|
ENV UV_CACHE_DIR=/tmp/.cache/uv
|
||||||
|
|
||||||
USER jellyplex_user
|
# Install the project's dependencies using the lockfile and settings
|
||||||
|
RUN --mount=type=cache,target=/tmp/.cache/uv \
|
||||||
|
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||||
|
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||||
|
uv sync --frozen --no-install-project --no-dev
|
||||||
|
|
||||||
ENTRYPOINT ["/sbin/tini", "--"]
|
# Then, add the rest of the project source code and install it
|
||||||
|
# Installing separately from its dependencies allows optimal layer caching
|
||||||
|
COPY . /app
|
||||||
|
RUN --mount=type=cache,target=/tmp/.cache/uv \
|
||||||
|
uv sync --frozen --no-dev
|
||||||
|
|
||||||
|
# Place executables in the environment at the front of the path
|
||||||
|
ENV PATH="/app/.venv/bin:$PATH"
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
RUN chmod +x *.sh && \
|
||||||
|
dos2unix *.sh
|
||||||
|
|
||||||
|
# Set default values to prevent issues
|
||||||
|
ENV DRYRUN="True"
|
||||||
|
ENV DEBUG_LEVEL="INFO"
|
||||||
|
ENV RUN_ONLY_ONCE="False"
|
||||||
|
ENV SLEEP_DURATION=3600
|
||||||
|
ENV LOG_FILE="log.log"
|
||||||
|
ENV MARK_FILE="mark.log"
|
||||||
|
ENV REQUEST_TIME=300
|
||||||
|
ENV GENERATE_GUIDS="True"
|
||||||
|
ENV GENERATE_LOCATIONS="True"
|
||||||
|
ENV MAX_THREADS=1
|
||||||
|
ENV USER_MAPPING=""
|
||||||
|
ENV LIBRARY_MAPPING=""
|
||||||
|
ENV BLACKLIST_LIBRARY=""
|
||||||
|
ENV WHITELIST_LIBRARY=""
|
||||||
|
ENV BLACKLIST_LIBRARY_TYPE=""
|
||||||
|
ENV WHITELIST_LIBRARY_TYPE=""
|
||||||
|
ENV BLACKLIST_USERS=""
|
||||||
|
ENV WHITELIST_USERS=""
|
||||||
|
ENV PLEX_BASEURL=""
|
||||||
|
ENV PLEX_TOKEN=""
|
||||||
|
ENV PLEX_USERNAME=""
|
||||||
|
ENV PLEX_PASSWORD=""
|
||||||
|
ENV PLEX_SERVERNAME=""
|
||||||
|
ENV SSL_BYPASS="False"
|
||||||
|
ENV JELLYFIN_BASEURL=""
|
||||||
|
ENV JELLYFIN_TOKEN=""
|
||||||
|
ENV EMBY_BASEURL=""
|
||||||
|
ENV EMBY_TOKEN=""
|
||||||
|
ENV SYNC_FROM_PLEX_TO_JELLYFIN="True"
|
||||||
|
ENV SYNC_FROM_PLEX_TO_PLEX="True"
|
||||||
|
ENV SYNC_FROM_PLEX_TO_EMBY="True"
|
||||||
|
ENV SYNC_FROM_JELLYFIN_TO_PLEX="True"
|
||||||
|
ENV SYNC_FROM_JELLYFIN_TO_JELLYFIN="True"
|
||||||
|
ENV SYNC_FROM_JELLYFIN_TO_EMBY="True"
|
||||||
|
ENV SYNC_FROM_EMBY_TO_PLEX="True"
|
||||||
|
ENV SYNC_FROM_EMBY_TO_JELLYFIN="True"
|
||||||
|
ENV SYNC_FROM_EMBY_TO_EMBY="True"
|
||||||
|
|
||||||
|
ENTRYPOINT ["tini", "--", "/app/entrypoint.sh"]
|
||||||
CMD ["python", "-u", "main.py"]
|
CMD ["python", "-u", "main.py"]
|
||||||
|
|||||||
113
Dockerfile.slim
113
Dockerfile.slim
@@ -1,56 +1,81 @@
|
|||||||
FROM python:3.11-slim
|
FROM ghcr.io/astral-sh/uv:bookworm-slim
|
||||||
|
|
||||||
ENV DRYRUN 'True'
|
|
||||||
ENV DEBUG 'True'
|
|
||||||
ENV DEBUG_LEVEL 'INFO'
|
|
||||||
ENV RUN_ONLY_ONCE 'False'
|
|
||||||
ENV SLEEP_DURATION '3600'
|
|
||||||
ENV LOGFILE 'log.log'
|
|
||||||
ENV MARKFILE 'mark.log'
|
|
||||||
|
|
||||||
ENV USER_MAPPING ''
|
|
||||||
ENV LIBRARY_MAPPING ''
|
|
||||||
|
|
||||||
ENV PLEX_BASEURL ''
|
|
||||||
ENV PLEX_TOKEN ''
|
|
||||||
ENV PLEX_USERNAME ''
|
|
||||||
ENV PLEX_PASSWORD ''
|
|
||||||
ENV PLEX_SERVERNAME ''
|
|
||||||
|
|
||||||
ENV JELLYFIN_BASEURL ''
|
|
||||||
ENV JELLYFIN_TOKEN ''
|
|
||||||
|
|
||||||
ENV SYNC_FROM_PLEX_TO_JELLYFIN 'True'
|
|
||||||
ENV SYNC_FROM_JELLYFIN_TO_PLEX 'True'
|
|
||||||
ENV SYNC_FROM_PLEX_TO_PLEX 'True'
|
|
||||||
ENV SYNC_FROM_JELLYFIN_TO_JELLYFIN 'True'
|
|
||||||
|
|
||||||
ENV BLACKLIST_LIBRARY ''
|
|
||||||
ENV WHITELIST_LIBRARY ''
|
|
||||||
ENV BLACKLIST_LIBRARY_TYPE ''
|
|
||||||
ENV WHITELIST_LIBRARY_TYPE ''
|
|
||||||
ENV BLACKLIST_USERS ''
|
|
||||||
ENV WHITELIST_USERS ''
|
|
||||||
|
|
||||||
|
ENV PUID=1000
|
||||||
|
ENV PGID=1000
|
||||||
|
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
apt-get install tini --yes --no-install-recommends && \
|
apt-get install tini gosu dos2unix --yes --no-install-recommends && \
|
||||||
apt-get clean && \
|
apt-get clean && \
|
||||||
rm -rf /var/lib/apt/lists/* && \
|
rm -rf /var/lib/apt/lists/*
|
||||||
addgroup --system jellyplex_user && \
|
|
||||||
adduser --system --no-create-home jellyplex_user --ingroup jellyplex_user && \
|
|
||||||
mkdir -p /app && \
|
|
||||||
chown -R jellyplex_user:jellyplex_user /app
|
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
COPY --chown=jellyplex_user:jellyplex_user ./requirements.txt ./
|
# Enable bytecode compilation
|
||||||
|
ENV UV_COMPILE_BYTECODE=1
|
||||||
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
ENV UV_LINK_MODE=copy
|
||||||
|
|
||||||
COPY --chown=jellyplex_user:jellyplex_user . .
|
# Set the cache directory to /tmp instead of root
|
||||||
|
ENV UV_CACHE_DIR=/tmp/.cache/uv
|
||||||
|
|
||||||
USER jellyplex_user
|
ENV UV_PYTHON_INSTALL_DIR=/app/.bin
|
||||||
|
|
||||||
ENTRYPOINT ["/bin/tini", "--"]
|
# Install the project's dependencies using the lockfile and settings
|
||||||
|
RUN --mount=type=cache,target=/tmp/.cache/uv \
|
||||||
|
--mount=type=bind,source=uv.lock,target=uv.lock \
|
||||||
|
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
|
||||||
|
uv sync --frozen --no-install-project --no-dev
|
||||||
|
|
||||||
|
# Then, add the rest of the project source code and install it
|
||||||
|
# Installing separately from its dependencies allows optimal layer caching
|
||||||
|
COPY . /app
|
||||||
|
RUN --mount=type=cache,target=/tmp/.cache/uv \
|
||||||
|
uv sync --frozen --no-dev
|
||||||
|
|
||||||
|
# Place executables in the environment at the front of the path
|
||||||
|
ENV PATH="/app/.venv/bin:$PATH"
|
||||||
|
|
||||||
|
RUN chmod +x *.sh && \
|
||||||
|
dos2unix *.sh
|
||||||
|
|
||||||
|
# Set default values to prevent issues
|
||||||
|
ENV DRYRUN="True"
|
||||||
|
ENV DEBUG_LEVEL="INFO"
|
||||||
|
ENV RUN_ONLY_ONCE="False"
|
||||||
|
ENV SLEEP_DURATION=3600
|
||||||
|
ENV LOG_FILE="log.log"
|
||||||
|
ENV MARK_FILE="mark.log"
|
||||||
|
ENV REQUEST_TIME=300
|
||||||
|
ENV GENERATE_GUIDS="True"
|
||||||
|
ENV GENERATE_LOCATIONS="True"
|
||||||
|
ENV MAX_THREADS=1
|
||||||
|
ENV USER_MAPPING=""
|
||||||
|
ENV LIBRARY_MAPPING=""
|
||||||
|
ENV BLACKLIST_LIBRARY=""
|
||||||
|
ENV WHITELIST_LIBRARY=""
|
||||||
|
ENV BLACKLIST_LIBRARY_TYPE=""
|
||||||
|
ENV WHITELIST_LIBRARY_TYPE=""
|
||||||
|
ENV BLACKLIST_USERS=""
|
||||||
|
ENV WHITELIST_USERS=""
|
||||||
|
ENV PLEX_BASEURL=""
|
||||||
|
ENV PLEX_TOKEN=""
|
||||||
|
ENV PLEX_USERNAME=""
|
||||||
|
ENV PLEX_PASSWORD=""
|
||||||
|
ENV PLEX_SERVERNAME=""
|
||||||
|
ENV SSL_BYPASS="False"
|
||||||
|
ENV JELLYFIN_BASEURL=""
|
||||||
|
ENV JELLYFIN_TOKEN=""
|
||||||
|
ENV EMBY_BASEURL=""
|
||||||
|
ENV EMBY_TOKEN=""
|
||||||
|
ENV SYNC_FROM_PLEX_TO_JELLYFIN="True"
|
||||||
|
ENV SYNC_FROM_PLEX_TO_PLEX="True"
|
||||||
|
ENV SYNC_FROM_PLEX_TO_EMBY="True"
|
||||||
|
ENV SYNC_FROM_JELLYFIN_TO_PLEX="True"
|
||||||
|
ENV SYNC_FROM_JELLYFIN_TO_JELLYFIN="True"
|
||||||
|
ENV SYNC_FROM_JELLYFIN_TO_EMBY="True"
|
||||||
|
ENV SYNC_FROM_EMBY_TO_PLEX="True"
|
||||||
|
ENV SYNC_FROM_EMBY_TO_JELLYFIN="True"
|
||||||
|
ENV SYNC_FROM_EMBY_TO_EMBY="True"
|
||||||
|
|
||||||
|
ENTRYPOINT ["/bin/tini", "--", "/app/entrypoint.sh"]
|
||||||
CMD ["python", "-u", "main.py"]
|
CMD ["python", "-u", "main.py"]
|
||||||
|
|||||||
112
README.md
112
README.md
@@ -1,44 +1,44 @@
|
|||||||
# JellyPlex-Watched
|
# JellyPlex-Watched
|
||||||
|
|
||||||
[](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com\&utm_medium=referral\&utm_content=luigi311/JellyPlex-Watched\&utm_campaign=Badge_Grade)
|
[](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)
|
||||||
|
|
||||||
Sync watched between jellyfin and plex locally
|
Sync watched between jellyfin, plex and emby locally
|
||||||
|
|
||||||
## Description
|
## Description
|
||||||
|
|
||||||
Keep in sync all your users watched history between jellyfin and plex servers locally. This uses file names and provider ids to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by entering multiple options in the .env plex/jellyfin section separated by commas.
|
Keep in sync all your users watched history between jellyfin, plex and emby servers locally. This uses file names and provider ids to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by entering multiple options in the .env plex/jellyfin section separated by commas.
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
### Plex
|
### Plex
|
||||||
|
|
||||||
* \[x] Match via filenames
|
- \[x] Match via filenames
|
||||||
* \[x] Match via provider ids
|
- \[x] Match via provider ids
|
||||||
* \[x] Map usernames
|
- \[x] Map usernames
|
||||||
* \[x] Use single login
|
- \[x] Use single login
|
||||||
* \[x] One way/multi way sync
|
- \[x] One way/multi way sync
|
||||||
* \[x] Sync watched
|
- \[x] Sync watched
|
||||||
* \[x] Sync in progress
|
- \[x] Sync in progress
|
||||||
|
|
||||||
### Jellyfin
|
### Jellyfin
|
||||||
|
|
||||||
* \[x] Match via filenames
|
- \[x] Match via filenames
|
||||||
* \[x] Match via provider ids
|
- \[x] Match via provider ids
|
||||||
* \[x] Map usernames
|
- \[x] Map usernames
|
||||||
* \[x] Use single login
|
- \[x] Use single login
|
||||||
* \[x] One way/multi way sync
|
- \[x] One way/multi way sync
|
||||||
* \[x] Sync watched
|
- \[x] Sync watched
|
||||||
* \[ ] Sync in progress
|
- \[x] Sync in progress
|
||||||
|
|
||||||
### Emby
|
### Emby
|
||||||
|
|
||||||
* \[ ] Match via filenames
|
- \[x] Match via filenames
|
||||||
* \[ ] Match via provider ids
|
- \[x] Match via provider ids
|
||||||
* \[ ] Map usernames
|
- \[x] Map usernames
|
||||||
* \[ ] Use single login
|
- \[x] Use single login
|
||||||
* \[ ] One way/multi way sync
|
- \[x] One way/multi way sync
|
||||||
* \[ ] Sync watched
|
- \[x] Sync watched
|
||||||
* \[ ] Sync in progress
|
- \[x] Sync in progress
|
||||||
|
|
||||||
## Configuration
|
## Configuration
|
||||||
|
|
||||||
@@ -48,66 +48,66 @@ Full list of configuration options can be found in the [.env.sample](.env.sample
|
|||||||
|
|
||||||
### Baremetal
|
### Baremetal
|
||||||
|
|
||||||
* Setup virtualenv of your choice
|
- Setup virtualenv of your choice
|
||||||
|
|
||||||
* Install dependencies
|
- Install dependencies
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
pip install -r requirements.txt
|
pip install -r requirements.txt
|
||||||
```
|
```
|
||||||
|
|
||||||
* Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens
|
- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens
|
||||||
|
|
||||||
* Run
|
- Run
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
python main.py
|
python main.py
|
||||||
```
|
```
|
||||||
|
|
||||||
### Docker
|
### Docker
|
||||||
|
|
||||||
* Build docker image
|
- Build docker image
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker build -t jellyplex-watched .
|
docker build -t jellyplex-watched .
|
||||||
```
|
```
|
||||||
|
|
||||||
* or use pre-built image
|
- or use pre-built image
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker pull luigi311/jellyplex-watched:latest
|
docker pull luigi311/jellyplex-watched:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
#### With variables
|
#### With variables
|
||||||
|
|
||||||
* Run
|
- Run
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
|
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
#### With .env
|
#### With .env
|
||||||
|
|
||||||
* Create a .env file similar to .env.sample and set the variables to match your setup
|
- Create a .env file similar to .env.sample and set the variables to match your setup
|
||||||
|
|
||||||
* Run
|
- Run
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
|
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
|
||||||
```
|
```
|
||||||
|
|
||||||
## Troubleshooting/Issues
|
## Troubleshooting/Issues
|
||||||
|
|
||||||
* Jellyfin
|
- Jellyfin
|
||||||
* Attempt to decode JSON with unexpected mimetype, make sure you enable remote access or add your docker subnet to lan networks in jellyfin settings
|
|
||||||
|
|
||||||
* Configuration
|
- Attempt to decode JSON with unexpected mimetype, make sure you enable remote access or add your docker subnet to lan networks in jellyfin settings
|
||||||
* Do not use quotes around variables in docker compose
|
|
||||||
|
|
||||||
|
- Configuration
|
||||||
|
- Do not use quotes around variables in docker compose
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
I am open to receiving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.
|
I am open to receiving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
@@ -1,32 +1,11 @@
|
|||||||
version: '3'
|
# Sync watched status between media servers locally
|
||||||
|
|
||||||
services:
|
services:
|
||||||
jellyplex-watched:
|
jellyplex-watched:
|
||||||
image: luigi311/jellyplex-watched:latest
|
image: luigi311/jellyplex-watched:latest
|
||||||
container_name: jellyplex-watched
|
container_name: jellyplex-watched
|
||||||
restart: always
|
restart: unless-stopped
|
||||||
environment:
|
environment:
|
||||||
- DRYRUN=True
|
- PUID=1000
|
||||||
- DEBUG=True
|
- PGID=1000
|
||||||
- DEBUG_LEVEL=info
|
env_file: "./.env"
|
||||||
- RUN_ONLY_ONCE=False
|
|
||||||
- SLEEP_DURATION=3600
|
|
||||||
- LOGFILE=/tmp/log.log
|
|
||||||
- MARKFILE=/tmp/mark.log
|
|
||||||
- USER_MAPPING={"user1":"user2"}
|
|
||||||
- LIBRARY_MAPPING={"TV Shows":"Shows"}
|
|
||||||
- BLACKLIST_LIBRARY=
|
|
||||||
- WHITELIST_LIBRARY=
|
|
||||||
- BLACKLIST_LIBRARY_TYPE=
|
|
||||||
- WHITELIST_LIBRARY_TYPE=
|
|
||||||
- BLACKLIST_USERS=
|
|
||||||
- WHITELIST_USERS=
|
|
||||||
- PLEX_BASEURL=https://localhost:32400
|
|
||||||
- PLEX_TOKEN=plex_token
|
|
||||||
- JELLYFIN_BASEURL=http://localhost:8096
|
|
||||||
- JELLYFIN_TOKEN=jelly_token
|
|
||||||
- SSL_BYPASS=True
|
|
||||||
- SYNC_FROM_PLEX_TO_JELLYFIN=True
|
|
||||||
- SYNC_FROM_JELLYFIN_TO_PLEX=True
|
|
||||||
- SYNC_FROM_PLEX_TO_PLEX=True
|
|
||||||
- SYNC_FROM_JELLYFIN_TO_JELLYFIN=True
|
|
||||||
|
|||||||
62
entrypoint.sh
Normal file
62
entrypoint.sh
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
#!/usr/bin/env sh
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
# Check if user is root
|
||||||
|
if [ "$(id -u)" = '0' ]; then
|
||||||
|
echo "User is root, checking if we need to create a user and group based on environment variables"
|
||||||
|
# Create group and user based on environment variables
|
||||||
|
if [ ! "$(getent group "$PGID")" ]; then
|
||||||
|
# If groupadd exists, use it
|
||||||
|
if command -v groupadd > /dev/null; then
|
||||||
|
groupadd -g "$PGID" jellyplex_watched
|
||||||
|
elif command -v addgroup > /dev/null; then
|
||||||
|
addgroup -g "$PGID" jellyplex_watched
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# If user id does not exist, create the user
|
||||||
|
if [ ! "$(getent passwd "$PUID")" ]; then
|
||||||
|
if command -v useradd > /dev/null; then
|
||||||
|
useradd --no-create-home -u "$PUID" -g "$PGID" jellyplex_watched
|
||||||
|
elif command -v adduser > /dev/null; then
|
||||||
|
# Get the group name based on the PGID since adduser does not have a flag to specify the group id
|
||||||
|
# and if the group id already exists the group name will be sommething unexpected
|
||||||
|
GROUPNAME=$(getent group "$PGID" | cut -d: -f1)
|
||||||
|
|
||||||
|
# Use alpine busybox adduser syntax
|
||||||
|
adduser -D -H -u "$PUID" -G "$GROUPNAME" jellyplex_watched
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# If user is not root, set the PUID and PGID to the current user
|
||||||
|
PUID=$(id -u)
|
||||||
|
PGID=$(id -g)
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Get directory of log and mark file to create base folder if it doesnt exist
|
||||||
|
LOG_DIR=$(dirname "$LOG_FILE")
|
||||||
|
# If LOG_DIR is set, create the directory
|
||||||
|
if [ -n "$LOG_DIR" ]; then
|
||||||
|
mkdir -p "$LOG_DIR"
|
||||||
|
fi
|
||||||
|
|
||||||
|
MARK_DIR=$(dirname "$MARK_FILE")
|
||||||
|
if [ -n "$MARK_DIR" ]; then
|
||||||
|
mkdir -p "$MARK_DIR"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Starting JellyPlex-Watched with UID: $PUID and GID: $PGID"
|
||||||
|
|
||||||
|
# If root run as the created user
|
||||||
|
if [ "$(id -u)" = '0' ]; then
|
||||||
|
chown -R "$PUID:$PGID" /app/.venv
|
||||||
|
chown -R "$PUID:$PGID" "$LOG_DIR"
|
||||||
|
chown -R "$PUID:$PGID" "$MARK_DIR"
|
||||||
|
|
||||||
|
# Run the application as the created user
|
||||||
|
exec gosu "$PUID:$PGID" "$@"
|
||||||
|
else
|
||||||
|
# Run the application as the current user
|
||||||
|
exec "$@"
|
||||||
|
fi
|
||||||
6
main.py
6
main.py
@@ -1,9 +1,9 @@
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
# Check python version 3.9 or higher
|
# Check python version 3.12 or higher
|
||||||
if not (3, 9) <= tuple(map(int, sys.version_info[:2])):
|
if not (3, 12) <= tuple(map(int, sys.version_info[:2])):
|
||||||
print("This script requires Python 3.9 or higher")
|
print("This script requires Python 3.12 or higher")
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
from src.main import main
|
from src.main import main
|
||||||
|
|||||||
22
pyproject.toml
Normal file
22
pyproject.toml
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
[project]
|
||||||
|
name = "jellyplex-watched"
|
||||||
|
version = "6.1.2"
|
||||||
|
description = "Sync watched between media servers locally"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
dependencies = [
|
||||||
|
"loguru>=0.7.3",
|
||||||
|
"packaging==24.2",
|
||||||
|
"plexapi==4.16.1",
|
||||||
|
"pydantic==2.10.6",
|
||||||
|
"python-dotenv==1.0.0",
|
||||||
|
"requests==2.32.3",
|
||||||
|
]
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
lint = [
|
||||||
|
"ruff>=0.9.6",
|
||||||
|
]
|
||||||
|
dev = [
|
||||||
|
"pytest>=8.3.4",
|
||||||
|
]
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
PlexAPI==4.15.7
|
|
||||||
requests==2.31.0
|
|
||||||
python-dotenv==1.0.0
|
|
||||||
@@ -1,15 +1,17 @@
|
|||||||
from src.functions import logger, search_mapping
|
from loguru import logger
|
||||||
|
|
||||||
|
from src.functions import search_mapping
|
||||||
|
|
||||||
|
|
||||||
def setup_black_white_lists(
|
def setup_black_white_lists(
|
||||||
blacklist_library: str,
|
blacklist_library: list[str] | None,
|
||||||
whitelist_library: str,
|
whitelist_library: list[str] | None,
|
||||||
blacklist_library_type: str,
|
blacklist_library_type: list[str] | None,
|
||||||
whitelist_library_type: str,
|
whitelist_library_type: list[str] | None,
|
||||||
blacklist_users: str,
|
blacklist_users: list[str] | None,
|
||||||
whitelist_users: str,
|
whitelist_users: list[str] | None,
|
||||||
library_mapping=None,
|
library_mapping: dict[str, str] | None = None,
|
||||||
user_mapping=None,
|
user_mapping: dict[str, str] | None = None,
|
||||||
):
|
):
|
||||||
blacklist_library, blacklist_library_type, blacklist_users = setup_x_lists(
|
blacklist_library, blacklist_library_type, blacklist_users = setup_x_lists(
|
||||||
blacklist_library,
|
blacklist_library,
|
||||||
@@ -40,53 +42,44 @@ def setup_black_white_lists(
|
|||||||
|
|
||||||
|
|
||||||
def setup_x_lists(
|
def setup_x_lists(
|
||||||
xlist_library,
|
xlist_library: list[str] | None,
|
||||||
xlist_library_type,
|
xlist_library_type: list[str] | None,
|
||||||
xlist_users,
|
xlist_users: list[str] | None,
|
||||||
xlist_type,
|
xlist_type: str | None,
|
||||||
library_mapping=None,
|
library_mapping: dict[str, str] | None = None,
|
||||||
user_mapping=None,
|
user_mapping: dict[str, str] | None = None,
|
||||||
):
|
) -> tuple[list[str], list[str], list[str]]:
|
||||||
|
out_library: list[str] = []
|
||||||
if xlist_library:
|
if xlist_library:
|
||||||
if len(xlist_library) > 0:
|
out_library = [x.strip() for x in xlist_library]
|
||||||
xlist_library = xlist_library.split(",")
|
if library_mapping:
|
||||||
xlist_library = [x.strip() for x in xlist_library]
|
temp_library: list[str] = []
|
||||||
if library_mapping:
|
for library in xlist_library:
|
||||||
temp_library = []
|
library_other = search_mapping(library_mapping, library)
|
||||||
for library in xlist_library:
|
if library_other:
|
||||||
library_other = search_mapping(library_mapping, library)
|
temp_library.append(library_other)
|
||||||
if library_other:
|
|
||||||
temp_library.append(library_other)
|
|
||||||
|
|
||||||
xlist_library = xlist_library + temp_library
|
out_library = out_library + temp_library
|
||||||
else:
|
logger.info(f"{xlist_type}list Library: {xlist_library}")
|
||||||
xlist_library = []
|
|
||||||
logger(f"{xlist_type}list Library: {xlist_library}", 1)
|
|
||||||
|
|
||||||
|
out_library_type: list[str] = []
|
||||||
if xlist_library_type:
|
if xlist_library_type:
|
||||||
if len(xlist_library_type) > 0:
|
out_library_type = [x.lower().strip() for x in xlist_library_type]
|
||||||
xlist_library_type = xlist_library_type.split(",")
|
|
||||||
xlist_library_type = [x.lower().strip() for x in xlist_library_type]
|
|
||||||
else:
|
|
||||||
xlist_library_type = []
|
|
||||||
logger(f"{xlist_type}list Library Type: {xlist_library_type}", 1)
|
|
||||||
|
|
||||||
|
logger.info(f"{xlist_type}list Library Type: {out_library_type}")
|
||||||
|
|
||||||
|
out_users: list[str] = []
|
||||||
if xlist_users:
|
if xlist_users:
|
||||||
if len(xlist_users) > 0:
|
out_users = [x.lower().strip() for x in xlist_users]
|
||||||
xlist_users = xlist_users.split(",")
|
if user_mapping:
|
||||||
xlist_users = [x.lower().strip() for x in xlist_users]
|
temp_users: list[str] = []
|
||||||
if user_mapping:
|
for user in out_users:
|
||||||
temp_users = []
|
user_other = search_mapping(user_mapping, user)
|
||||||
for user in xlist_users:
|
if user_other:
|
||||||
user_other = search_mapping(user_mapping, user)
|
temp_users.append(user_other)
|
||||||
if user_other:
|
|
||||||
temp_users.append(user_other)
|
|
||||||
|
|
||||||
xlist_users = xlist_users + temp_users
|
out_users = out_users + temp_users
|
||||||
else:
|
|
||||||
xlist_users = []
|
|
||||||
else:
|
|
||||||
xlist_users = []
|
|
||||||
logger(f"{xlist_type}list Users: {xlist_users}", 1)
|
|
||||||
|
|
||||||
return xlist_library, xlist_library_type, xlist_users
|
logger.info(f"{xlist_type}list Users: {out_users}")
|
||||||
|
|
||||||
|
return out_library, out_library_type, out_users
|
||||||
|
|||||||
122
src/connection.py
Normal file
122
src/connection.py
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
import os
|
||||||
|
from typing import Literal
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from src.functions import str_to_bool
|
||||||
|
from src.plex import Plex
|
||||||
|
from src.jellyfin import Jellyfin
|
||||||
|
from src.emby import Emby
|
||||||
|
|
||||||
|
load_dotenv(override=True)
|
||||||
|
|
||||||
|
|
||||||
|
def jellyfin_emby_server_connection(
|
||||||
|
server_baseurl: str, server_token: str, server_type: Literal["jellyfin", "emby"]
|
||||||
|
) -> list[Jellyfin | Emby]:
|
||||||
|
servers: list[Jellyfin | Emby] = []
|
||||||
|
server: Jellyfin | Emby
|
||||||
|
|
||||||
|
server_baseurls = server_baseurl.split(",")
|
||||||
|
server_tokens = server_token.split(",")
|
||||||
|
|
||||||
|
if len(server_baseurls) != len(server_tokens):
|
||||||
|
raise Exception(
|
||||||
|
f"{server_type.upper()}_BASEURL and {server_type.upper()}_TOKEN must have the same number of entries"
|
||||||
|
)
|
||||||
|
|
||||||
|
for i, baseurl in enumerate(server_baseurls):
|
||||||
|
baseurl = baseurl.strip()
|
||||||
|
if baseurl[-1] == "/":
|
||||||
|
baseurl = baseurl[:-1]
|
||||||
|
|
||||||
|
if server_type == "jellyfin":
|
||||||
|
server = Jellyfin(baseurl=baseurl, token=server_tokens[i].strip())
|
||||||
|
servers.append(server)
|
||||||
|
|
||||||
|
elif server_type == "emby":
|
||||||
|
server = Emby(baseurl=baseurl, token=server_tokens[i].strip())
|
||||||
|
servers.append(server)
|
||||||
|
else:
|
||||||
|
raise Exception("Unknown server type")
|
||||||
|
|
||||||
|
logger.debug(f"{server_type} Server {i} info: {server.info()}")
|
||||||
|
|
||||||
|
return servers
|
||||||
|
|
||||||
|
|
||||||
|
def generate_server_connections() -> list[Plex | Jellyfin | Emby]:
|
||||||
|
servers: list[Plex | Jellyfin | Emby] = []
|
||||||
|
|
||||||
|
plex_baseurl_str: str | None = os.getenv("PLEX_BASEURL", None)
|
||||||
|
plex_token_str: str | None = os.getenv("PLEX_TOKEN", None)
|
||||||
|
plex_username_str: str | None = os.getenv("PLEX_USERNAME", None)
|
||||||
|
plex_password_str: str | None = os.getenv("PLEX_PASSWORD", None)
|
||||||
|
plex_servername_str: str | None = os.getenv("PLEX_SERVERNAME", None)
|
||||||
|
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))
|
||||||
|
|
||||||
|
if plex_baseurl_str and plex_token_str:
|
||||||
|
plex_baseurl = plex_baseurl_str.split(",")
|
||||||
|
plex_token = plex_token_str.split(",")
|
||||||
|
|
||||||
|
if len(plex_baseurl) != len(plex_token):
|
||||||
|
raise Exception(
|
||||||
|
"PLEX_BASEURL and PLEX_TOKEN must have the same number of entries"
|
||||||
|
)
|
||||||
|
|
||||||
|
for i, url in enumerate(plex_baseurl):
|
||||||
|
server = Plex(
|
||||||
|
baseurl=url.strip(),
|
||||||
|
token=plex_token[i].strip(),
|
||||||
|
username=None,
|
||||||
|
password=None,
|
||||||
|
servername=None,
|
||||||
|
ssl_bypass=ssl_bypass,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug(f"Plex Server {i} info: {server.info()}")
|
||||||
|
|
||||||
|
servers.append(server)
|
||||||
|
|
||||||
|
if plex_username_str and plex_password_str and plex_servername_str:
|
||||||
|
plex_username = plex_username_str.split(",")
|
||||||
|
plex_password = plex_password_str.split(",")
|
||||||
|
plex_servername = plex_servername_str.split(",")
|
||||||
|
|
||||||
|
if len(plex_username) != len(plex_password) or len(plex_username) != len(
|
||||||
|
plex_servername
|
||||||
|
):
|
||||||
|
raise Exception(
|
||||||
|
"PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries"
|
||||||
|
)
|
||||||
|
|
||||||
|
for i, username in enumerate(plex_username):
|
||||||
|
server = Plex(
|
||||||
|
baseurl=None,
|
||||||
|
token=None,
|
||||||
|
username=username.strip(),
|
||||||
|
password=plex_password[i].strip(),
|
||||||
|
servername=plex_servername[i].strip(),
|
||||||
|
ssl_bypass=ssl_bypass,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug(f"Plex Server {i} info: {server.info()}")
|
||||||
|
servers.append(server)
|
||||||
|
|
||||||
|
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
|
||||||
|
jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)
|
||||||
|
if jellyfin_baseurl and jellyfin_token:
|
||||||
|
servers.extend(
|
||||||
|
jellyfin_emby_server_connection(
|
||||||
|
jellyfin_baseurl, jellyfin_token, "jellyfin"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
emby_baseurl = os.getenv("EMBY_BASEURL", None)
|
||||||
|
emby_token = os.getenv("EMBY_TOKEN", None)
|
||||||
|
if emby_baseurl and emby_token:
|
||||||
|
servers.extend(
|
||||||
|
jellyfin_emby_server_connection(emby_baseurl, emby_token, "emby")
|
||||||
|
)
|
||||||
|
|
||||||
|
return servers
|
||||||
25
src/emby.py
Normal file
25
src/emby.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from src.jellyfin_emby import JellyfinEmby
|
||||||
|
from packaging.version import parse, Version
|
||||||
|
|
||||||
|
|
||||||
|
class Emby(JellyfinEmby):
|
||||||
|
def __init__(self, baseurl, token):
|
||||||
|
authorization = (
|
||||||
|
"Emby , "
|
||||||
|
'Client="JellyPlex-Watched", '
|
||||||
|
'Device="script", '
|
||||||
|
'DeviceId="script", '
|
||||||
|
'Version="6.0.2"'
|
||||||
|
)
|
||||||
|
headers = {
|
||||||
|
"Accept": "application/json",
|
||||||
|
"X-Emby-Token": token,
|
||||||
|
"X-Emby-Authorization": authorization,
|
||||||
|
}
|
||||||
|
|
||||||
|
super().__init__(
|
||||||
|
server_type="Emby", baseurl=baseurl, token=token, headers=headers
|
||||||
|
)
|
||||||
|
|
||||||
|
def is_partial_update_supported(self, server_version: Version) -> bool:
|
||||||
|
return server_version > parse("4.4")
|
||||||
122
src/functions.py
122
src/functions.py
@@ -1,48 +1,23 @@
|
|||||||
import os
|
import os
|
||||||
from concurrent.futures import ThreadPoolExecutor
|
from concurrent.futures import Future, ThreadPoolExecutor
|
||||||
|
from typing import Any, Callable
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
load_dotenv(override=True)
|
load_dotenv(override=True)
|
||||||
|
|
||||||
logfile = os.getenv("LOGFILE", "log.log")
|
mark_file = os.getenv("MARK_FILE", os.getenv("MARKFILE", "mark.log"))
|
||||||
markfile = os.getenv("MARKFILE", "mark.log")
|
|
||||||
|
|
||||||
|
|
||||||
def logger(message: str, log_type=0):
|
|
||||||
debug = str_to_bool(os.getenv("DEBUG", "False"))
|
|
||||||
debug_level = os.getenv("DEBUG_LEVEL", "info").lower()
|
|
||||||
|
|
||||||
output = str(message)
|
|
||||||
if log_type == 0:
|
|
||||||
pass
|
|
||||||
elif log_type == 1 and (debug and debug_level in ("info", "debug")):
|
|
||||||
output = f"[INFO]: {output}"
|
|
||||||
elif log_type == 2:
|
|
||||||
output = f"[ERROR]: {output}"
|
|
||||||
elif log_type == 3 and (debug and debug_level == "debug"):
|
|
||||||
output = f"[DEBUG]: {output}"
|
|
||||||
elif log_type == 4:
|
|
||||||
output = f"[WARNING]: {output}"
|
|
||||||
elif log_type == 5:
|
|
||||||
output = f"[MARK]: {output}"
|
|
||||||
elif log_type == 6:
|
|
||||||
output = f"[DRYRUN]: {output}"
|
|
||||||
else:
|
|
||||||
output = None
|
|
||||||
|
|
||||||
if output is not None:
|
|
||||||
print(output)
|
|
||||||
file = open(logfile, "a", encoding="utf-8")
|
|
||||||
file.write(output + "\n")
|
|
||||||
|
|
||||||
|
|
||||||
def log_marked(
|
def log_marked(
|
||||||
username: str, library: str, movie_show: str, episode: str = None, duration=None
|
server_type: str,
|
||||||
|
server_name: str,
|
||||||
|
username: str,
|
||||||
|
library: str,
|
||||||
|
movie_show: str,
|
||||||
|
episode: str | None = None,
|
||||||
|
duration: float | None = None,
|
||||||
):
|
):
|
||||||
if markfile is None:
|
output = f"{server_type}/{server_name}/{username}/{library}/{movie_show}"
|
||||||
return
|
|
||||||
|
|
||||||
output = f"{username}/{library}/{movie_show}"
|
|
||||||
|
|
||||||
if episode:
|
if episode:
|
||||||
output += f"/{episode}"
|
output += f"/{episode}"
|
||||||
@@ -50,35 +25,20 @@ def log_marked(
|
|||||||
if duration:
|
if duration:
|
||||||
output += f"/{duration}"
|
output += f"/{duration}"
|
||||||
|
|
||||||
file = open(f"{markfile}", "a", encoding="utf-8")
|
with open(f"{mark_file}", "a", encoding="utf-8") as file:
|
||||||
file.write(output + "\n")
|
file.write(output + "\n")
|
||||||
|
|
||||||
|
|
||||||
# Reimplementation of distutils.util.strtobool due to it being deprecated
|
# Reimplementation of distutils.util.strtobool due to it being deprecated
|
||||||
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
|
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
|
||||||
def str_to_bool(value: any) -> bool:
|
def str_to_bool(value: str) -> bool:
|
||||||
if not value:
|
if not value:
|
||||||
return False
|
return False
|
||||||
return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
|
return str(value).lower() in ("y", "yes", "t", "true", "on", "1")
|
||||||
|
|
||||||
|
|
||||||
# Search for nested element in list
|
|
||||||
def contains_nested(element, lst):
|
|
||||||
if lst is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
for i, item in enumerate(lst):
|
|
||||||
if item is None:
|
|
||||||
continue
|
|
||||||
if element in item:
|
|
||||||
return i
|
|
||||||
elif element == item:
|
|
||||||
return i
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
# Get mapped value
|
# Get mapped value
|
||||||
def search_mapping(dictionary: dict, key_value: str):
|
def search_mapping(dictionary: dict[str, str], key_value: str) -> str | None:
|
||||||
if key_value in dictionary.keys():
|
if key_value in dictionary.keys():
|
||||||
return dictionary[key_value]
|
return dictionary[key_value]
|
||||||
elif key_value.lower() in dictionary.keys():
|
elif key_value.lower() in dictionary.keys():
|
||||||
@@ -93,36 +53,66 @@ def search_mapping(dictionary: dict, key_value: str):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# Return list of objects that exist in both lists including mappings
|
||||||
|
def match_list(
|
||||||
|
list1: list[str], list2: list[str], list_mapping: dict[str, str] | None = None
|
||||||
|
) -> list[str]:
|
||||||
|
output: list[str] = []
|
||||||
|
for element in list1:
|
||||||
|
if element in list2:
|
||||||
|
output.append(element)
|
||||||
|
elif list_mapping:
|
||||||
|
element_other = search_mapping(list_mapping, element)
|
||||||
|
if element_other in list2:
|
||||||
|
output.append(element)
|
||||||
|
|
||||||
|
return output
|
||||||
|
|
||||||
|
|
||||||
def future_thread_executor(
|
def future_thread_executor(
|
||||||
args: list, threads: int = None, override_threads: bool = False
|
args: list[tuple[Callable[..., Any], ...]],
|
||||||
):
|
threads: int | None = None,
|
||||||
futures_list = []
|
override_threads: bool = False,
|
||||||
results = []
|
) -> list[Any]:
|
||||||
|
results: list[Any] = []
|
||||||
|
|
||||||
workers = min(int(os.getenv("MAX_THREADS", 32)), os.cpu_count() * 2)
|
# Determine the number of workers, defaulting to 1 if os.cpu_count() returns None
|
||||||
if threads:
|
max_threads_env: int = int(os.getenv("MAX_THREADS", 32))
|
||||||
|
cpu_threads: int = os.cpu_count() or 1 # Default to 1 if os.cpu_count() is None
|
||||||
|
workers: int = min(max_threads_env, cpu_threads * 2)
|
||||||
|
|
||||||
|
# Adjust workers based on threads parameter and override_threads flag
|
||||||
|
if threads is not None:
|
||||||
workers = min(threads, workers)
|
workers = min(threads, workers)
|
||||||
|
|
||||||
if override_threads:
|
if override_threads:
|
||||||
workers = threads
|
workers = threads if threads is not None else workers
|
||||||
|
|
||||||
# If only one worker, run in main thread to avoid overhead
|
# If only one worker, run in main thread to avoid overhead
|
||||||
if workers == 1:
|
if workers == 1:
|
||||||
results = []
|
|
||||||
for arg in args:
|
for arg in args:
|
||||||
results.append(arg[0](*arg[1:]))
|
results.append(arg[0](*arg[1:]))
|
||||||
return results
|
return results
|
||||||
|
|
||||||
with ThreadPoolExecutor(max_workers=workers) as executor:
|
with ThreadPoolExecutor(max_workers=workers) as executor:
|
||||||
|
futures_list: list[Future[Any]] = []
|
||||||
|
|
||||||
for arg in args:
|
for arg in args:
|
||||||
# * arg unpacks the list into actual arguments
|
# * arg unpacks the list into actual arguments
|
||||||
futures_list.append(executor.submit(*arg))
|
futures_list.append(executor.submit(*arg))
|
||||||
|
|
||||||
for future in futures_list:
|
for out in futures_list:
|
||||||
try:
|
try:
|
||||||
result = future.result()
|
result = out.result()
|
||||||
results.append(result)
|
results.append(result)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
def parse_string_to_list(string: str | None) -> list[str]:
|
||||||
|
output: list[str] = []
|
||||||
|
if string and len(string) > 0:
|
||||||
|
output = string.split(",")
|
||||||
|
|
||||||
|
return output
|
||||||
|
|||||||
884
src/jellyfin.py
884
src/jellyfin.py
@@ -1,859 +1,25 @@
|
|||||||
import traceback, os
|
from src.jellyfin_emby import JellyfinEmby
|
||||||
from math import floor
|
from packaging.version import parse, Version
|
||||||
from dotenv import load_dotenv
|
|
||||||
import requests
|
|
||||||
|
class Jellyfin(JellyfinEmby):
|
||||||
from src.functions import (
|
def __init__(self, baseurl, token):
|
||||||
logger,
|
authorization = (
|
||||||
search_mapping,
|
"MediaBrowser , "
|
||||||
contains_nested,
|
'Client="JellyPlex-Watched", '
|
||||||
log_marked,
|
'Device="script", '
|
||||||
str_to_bool,
|
'DeviceId="script", '
|
||||||
)
|
'Version="6.0.2", '
|
||||||
from src.library import (
|
f'Token="{token}"'
|
||||||
check_skip_logic,
|
)
|
||||||
generate_library_guids_dict,
|
headers = {
|
||||||
)
|
"Accept": "application/json",
|
||||||
from src.watched import (
|
"Authorization": authorization,
|
||||||
combine_watched_dicts,
|
}
|
||||||
)
|
|
||||||
|
super().__init__(
|
||||||
load_dotenv(override=True)
|
server_type="Jellyfin", baseurl=baseurl, token=token, headers=headers
|
||||||
|
)
|
||||||
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
|
||||||
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
|
def is_partial_update_supported(self, server_version: Version) -> bool:
|
||||||
|
return server_version >= parse("10.9.0")
|
||||||
|
|
||||||
def get_guids(item):
|
|
||||||
if item.get("Name"):
|
|
||||||
guids = {"title": item.get("Name")}
|
|
||||||
else:
|
|
||||||
logger(f"Jellyfin: Name not found in {item.get('Id')}", 1)
|
|
||||||
guids = {"title": None}
|
|
||||||
|
|
||||||
if "ProviderIds" in item:
|
|
||||||
guids.update({k.lower(): v for k, v in item["ProviderIds"].items()})
|
|
||||||
else:
|
|
||||||
logger(f"Jellyfin: ProviderIds not found in {item.get('Name')}", 1)
|
|
||||||
|
|
||||||
if "MediaSources" in item:
|
|
||||||
guids["locations"] = tuple(
|
|
||||||
[x["Path"].split("/")[-1] for x in item["MediaSources"] if "Path" in x]
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger(f"Jellyfin: MediaSources not found in {item.get('Name')}", 1)
|
|
||||||
guids["locations"] = tuple()
|
|
||||||
|
|
||||||
if "UserData" in item:
|
|
||||||
guids["status"] = {
|
|
||||||
"completed": item["UserData"]["Played"],
|
|
||||||
# Convert ticks to milliseconds to match Plex
|
|
||||||
"time": floor(item["UserData"]["PlaybackPositionTicks"] / 10000),
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
logger(f"Jellyfin: UserData not found in {item.get('Name')}", 1)
|
|
||||||
guids["status"] = {}
|
|
||||||
|
|
||||||
return guids
|
|
||||||
|
|
||||||
|
|
||||||
def get_video_status(jellyfin_video, videos_ids, videos):
|
|
||||||
video_status = None
|
|
||||||
|
|
||||||
if generate_locations:
|
|
||||||
if "MediaSources" in jellyfin_video:
|
|
||||||
for video_location in jellyfin_video["MediaSources"]:
|
|
||||||
if "Path" in video_location:
|
|
||||||
if (
|
|
||||||
contains_nested(
|
|
||||||
video_location["Path"].split("/")[-1],
|
|
||||||
videos_ids["locations"],
|
|
||||||
)
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
for video in videos:
|
|
||||||
if (
|
|
||||||
contains_nested(
|
|
||||||
video_location["Path"].split("/")[-1],
|
|
||||||
video["locations"],
|
|
||||||
)
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
video_status = video["status"]
|
|
||||||
break
|
|
||||||
break
|
|
||||||
|
|
||||||
if generate_guids:
|
|
||||||
if not video_status:
|
|
||||||
for (
|
|
||||||
video_provider_source,
|
|
||||||
video_provider_id,
|
|
||||||
) in jellyfin_video["ProviderIds"].items():
|
|
||||||
if video_provider_source.lower() in videos_ids:
|
|
||||||
if (
|
|
||||||
video_provider_id.lower()
|
|
||||||
in videos_ids[video_provider_source.lower()]
|
|
||||||
):
|
|
||||||
for video in videos:
|
|
||||||
if video_provider_id.lower() in video.get(
|
|
||||||
video_provider_source.lower(), []
|
|
||||||
):
|
|
||||||
video_status = video["status"]
|
|
||||||
break
|
|
||||||
break
|
|
||||||
|
|
||||||
return video_status
|
|
||||||
|
|
||||||
|
|
||||||
class Jellyfin:
|
|
||||||
def __init__(self, baseurl, token):
|
|
||||||
self.baseurl = baseurl
|
|
||||||
self.token = token
|
|
||||||
self.timeout = int(os.getenv("REQUEST_TIMEOUT", 300))
|
|
||||||
|
|
||||||
if not self.baseurl:
|
|
||||||
raise Exception("Jellyfin baseurl not set")
|
|
||||||
|
|
||||||
if not self.token:
|
|
||||||
raise Exception("Jellyfin token not set")
|
|
||||||
|
|
||||||
self.session = requests.Session()
|
|
||||||
self.users = self.get_users()
|
|
||||||
|
|
||||||
def query(self, query, query_type, session=None, identifiers=None):
|
|
||||||
try:
|
|
||||||
results = None
|
|
||||||
|
|
||||||
authorization = (
|
|
||||||
"MediaBrowser , "
|
|
||||||
'Client="other", '
|
|
||||||
'Device="script", '
|
|
||||||
'DeviceId="script", '
|
|
||||||
'Version="0.0.0"'
|
|
||||||
)
|
|
||||||
headers = {
|
|
||||||
"Accept": "application/json",
|
|
||||||
"X-Emby-Token": self.token,
|
|
||||||
"X-Emby-Authorization": authorization,
|
|
||||||
}
|
|
||||||
|
|
||||||
if query_type == "get":
|
|
||||||
response = self.session.get(
|
|
||||||
self.baseurl + query, headers=headers, timeout=self.timeout
|
|
||||||
)
|
|
||||||
if response.status_code != 200:
|
|
||||||
raise Exception(
|
|
||||||
f"Query failed with status {response.status_code} {response.reason}"
|
|
||||||
)
|
|
||||||
results = response.json()
|
|
||||||
|
|
||||||
elif query_type == "post":
|
|
||||||
response = self.session.post(
|
|
||||||
self.baseurl + query, headers=headers, timeout=self.timeout
|
|
||||||
)
|
|
||||||
if response.status_code != 200:
|
|
||||||
raise Exception(
|
|
||||||
f"Query failed with status {response.status_code} {response.reason}"
|
|
||||||
)
|
|
||||||
results = response.json()
|
|
||||||
|
|
||||||
if not isinstance(results, list) and not isinstance(results, dict):
|
|
||||||
raise Exception("Query result is not of type list or dict")
|
|
||||||
|
|
||||||
# append identifiers to results
|
|
||||||
if identifiers:
|
|
||||||
results["Identifiers"] = identifiers
|
|
||||||
|
|
||||||
return results
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger(f"Jellyfin: Query {query_type} {query}\nResults {results}\n{e}", 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|
||||||
def info(self) -> str:
|
|
||||||
try:
|
|
||||||
query_string = "/System/Info/Public"
|
|
||||||
|
|
||||||
response = self.query(query_string, "get")
|
|
||||||
|
|
||||||
if response:
|
|
||||||
return f"{response['ServerName']}: {response['Version']}"
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger(f"Jellyfin: Get server name failed {e}", 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|
||||||
def get_users(self):
|
|
||||||
try:
|
|
||||||
users = {}
|
|
||||||
|
|
||||||
query_string = "/Users"
|
|
||||||
response = self.query(query_string, "get")
|
|
||||||
|
|
||||||
# If response is not empty
|
|
||||||
if response:
|
|
||||||
for user in response:
|
|
||||||
users[user["Name"]] = user["Id"]
|
|
||||||
|
|
||||||
return users
|
|
||||||
except Exception as e:
|
|
||||||
logger(f"Jellyfin: Get users failed {e}", 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|
||||||
def get_user_library_watched(
|
|
||||||
self, user_name, user_id, library_type, library_id, library_title
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
user_name = user_name.lower()
|
|
||||||
user_watched = {}
|
|
||||||
user_watched[user_name] = {}
|
|
||||||
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Generating watched for {user_name} in library {library_title}",
|
|
||||||
0,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Movies
|
|
||||||
if library_type == "Movie":
|
|
||||||
user_watched[user_name][library_title] = []
|
|
||||||
watched = self.query(
|
|
||||||
f"/Users/{user_id}/Items"
|
|
||||||
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
|
|
||||||
in_progress = self.query(
|
|
||||||
f"/Users/{user_id}/Items"
|
|
||||||
+ f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
|
|
||||||
for movie in watched["Items"] + in_progress["Items"]:
|
|
||||||
if "MediaSources" in movie and movie["MediaSources"] != {}:
|
|
||||||
if "UserData" not in movie:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Skip if not watched or watched less than a minute
|
|
||||||
if (
|
|
||||||
movie["UserData"]["Played"] == True
|
|
||||||
or movie["UserData"]["PlaybackPositionTicks"] > 600000000
|
|
||||||
):
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Adding {movie.get('Name')} to {user_name} watched list",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Get the movie's GUIDs
|
|
||||||
movie_guids = get_guids(movie)
|
|
||||||
|
|
||||||
# Append the movie dictionary to the list for the given user and library
|
|
||||||
user_watched[user_name][library_title].append(movie_guids)
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Added {movie_guids} to {user_name} watched list",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
|
|
||||||
# TV Shows
|
|
||||||
if library_type in ["Series", "Episode"]:
|
|
||||||
# Initialize an empty dictionary for the given user and library
|
|
||||||
user_watched[user_name][library_title] = {}
|
|
||||||
|
|
||||||
# Retrieve a list of watched TV shows
|
|
||||||
watched_shows = self.query(
|
|
||||||
f"/Users/{user_id}/Items"
|
|
||||||
+ f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
|
|
||||||
# Filter the list of shows to only include those that have been partially or fully watched
|
|
||||||
watched_shows_filtered = []
|
|
||||||
for show in watched_shows["Items"]:
|
|
||||||
if not "UserData" in show:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if "PlayedPercentage" in show["UserData"]:
|
|
||||||
if show["UserData"]["PlayedPercentage"] > 0:
|
|
||||||
watched_shows_filtered.append(show)
|
|
||||||
|
|
||||||
# Retrieve the seasons of each watched show
|
|
||||||
seasons_watched = []
|
|
||||||
for show in watched_shows_filtered:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Adding {show.get('Name')} to {user_name} watched list",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
|
|
||||||
show_guids["title"] = show["Name"]
|
|
||||||
show_guids["locations"] = (
|
|
||||||
tuple([show["Path"].split("/")[-1]])
|
|
||||||
if "Path" in show
|
|
||||||
else tuple()
|
|
||||||
)
|
|
||||||
show_guids = frozenset(show_guids.items())
|
|
||||||
show_identifiers = {
|
|
||||||
"show_guids": show_guids,
|
|
||||||
"show_id": show["Id"],
|
|
||||||
}
|
|
||||||
|
|
||||||
seasons_watched.append(
|
|
||||||
self.query(
|
|
||||||
f"/Shows/{show['Id']}/Seasons"
|
|
||||||
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,RecursiveItemCount",
|
|
||||||
"get",
|
|
||||||
identifiers=frozenset(show_identifiers.items()),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Filter the list of seasons to only include those that have been partially or fully watched
|
|
||||||
seasons_watched_filtered = []
|
|
||||||
for seasons in seasons_watched:
|
|
||||||
seasons_watched_filtered_dict = {}
|
|
||||||
seasons_watched_filtered_dict["Identifiers"] = seasons[
|
|
||||||
"Identifiers"
|
|
||||||
]
|
|
||||||
seasons_watched_filtered_dict["Items"] = []
|
|
||||||
for season in seasons["Items"]:
|
|
||||||
if "PlayedPercentage" in season["UserData"]:
|
|
||||||
if season["UserData"]["PlayedPercentage"] > 0:
|
|
||||||
seasons_watched_filtered_dict["Items"].append(season)
|
|
||||||
|
|
||||||
if seasons_watched_filtered_dict["Items"]:
|
|
||||||
seasons_watched_filtered.append(seasons_watched_filtered_dict)
|
|
||||||
|
|
||||||
# Create a list of tasks to retrieve the episodes of each watched season
|
|
||||||
watched_episodes = []
|
|
||||||
for seasons in seasons_watched_filtered:
|
|
||||||
if len(seasons["Items"]) > 0:
|
|
||||||
for season in seasons["Items"]:
|
|
||||||
if "IndexNumber" not in season:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Skipping show {season.get('SeriesName')} season {season.get('Name')} as it has no index number",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
|
|
||||||
continue
|
|
||||||
season_identifiers = dict(seasons["Identifiers"])
|
|
||||||
season_identifiers["season_index"] = season["IndexNumber"]
|
|
||||||
watched_task = self.query(
|
|
||||||
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
|
||||||
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&Filters=IsPlayed&Fields=ProviderIds,MediaSources",
|
|
||||||
"get",
|
|
||||||
identifiers=frozenset(season_identifiers.items()),
|
|
||||||
)
|
|
||||||
|
|
||||||
in_progress_task = self.query(
|
|
||||||
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
|
||||||
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&Filters=IsResumable&Fields=ProviderIds,MediaSources",
|
|
||||||
"get",
|
|
||||||
identifiers=frozenset(season_identifiers.items()),
|
|
||||||
)
|
|
||||||
watched_episodes.append(watched_task)
|
|
||||||
watched_episodes.append(in_progress_task)
|
|
||||||
|
|
||||||
# Iterate through the watched episodes
|
|
||||||
for episodes in watched_episodes:
|
|
||||||
# If the season has any watched episodes
|
|
||||||
if len(episodes["Items"]) > 0:
|
|
||||||
# Create a dictionary for the season with its identifier and episodes
|
|
||||||
season_dict = {}
|
|
||||||
season_dict["Identifiers"] = dict(episodes["Identifiers"])
|
|
||||||
season_dict["Episodes"] = []
|
|
||||||
for episode in episodes["Items"]:
|
|
||||||
if (
|
|
||||||
"MediaSources" in episode
|
|
||||||
and episode["MediaSources"] != {}
|
|
||||||
):
|
|
||||||
# If watched or watched more than a minute
|
|
||||||
if (
|
|
||||||
episode["UserData"]["Played"] == True
|
|
||||||
or episode["UserData"]["PlaybackPositionTicks"]
|
|
||||||
> 600000000
|
|
||||||
):
|
|
||||||
episode_dict = get_guids(episode)
|
|
||||||
# Add the episode dictionary to the season's list of episodes
|
|
||||||
season_dict["Episodes"].append(episode_dict)
|
|
||||||
|
|
||||||
# Add the season dictionary to the show's list of seasons
|
|
||||||
if (
|
|
||||||
season_dict["Identifiers"]["show_guids"]
|
|
||||||
not in user_watched[user_name][library_title]
|
|
||||||
):
|
|
||||||
user_watched[user_name][library_title][
|
|
||||||
season_dict["Identifiers"]["show_guids"]
|
|
||||||
] = {}
|
|
||||||
|
|
||||||
if (
|
|
||||||
season_dict["Identifiers"]["season_index"]
|
|
||||||
not in user_watched[user_name][library_title][
|
|
||||||
season_dict["Identifiers"]["show_guids"]
|
|
||||||
]
|
|
||||||
):
|
|
||||||
user_watched[user_name][library_title][
|
|
||||||
season_dict["Identifiers"]["show_guids"]
|
|
||||||
][season_dict["Identifiers"]["season_index"]] = []
|
|
||||||
|
|
||||||
user_watched[user_name][library_title][
|
|
||||||
season_dict["Identifiers"]["show_guids"]
|
|
||||||
][season_dict["Identifiers"]["season_index"]] = season_dict[
|
|
||||||
"Episodes"
|
|
||||||
]
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Added {season_dict['Episodes']} to {user_name} {season_dict['Identifiers']['show_guids']} watched list",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Got watched for {user_name} in library {library_title}", 1
|
|
||||||
)
|
|
||||||
if library_title in user_watched[user_name]:
|
|
||||||
logger(f"Jellyfin: {user_watched[user_name][library_title]}", 3)
|
|
||||||
|
|
||||||
return user_watched
|
|
||||||
except Exception as e:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
|
|
||||||
2,
|
|
||||||
)
|
|
||||||
|
|
||||||
logger(traceback.format_exc(), 2)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
def get_users_watched(
|
|
||||||
self,
|
|
||||||
user_name,
|
|
||||||
user_id,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
# Get all libraries
|
|
||||||
user_name = user_name.lower()
|
|
||||||
watched = []
|
|
||||||
|
|
||||||
libraries = []
|
|
||||||
|
|
||||||
all_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
|
||||||
for library in all_libraries["Items"]:
|
|
||||||
library_id = library["Id"]
|
|
||||||
library_title = library["Name"]
|
|
||||||
identifiers = {
|
|
||||||
"library_id": library_id,
|
|
||||||
"library_title": library_title,
|
|
||||||
}
|
|
||||||
libraries.append(
|
|
||||||
self.query(
|
|
||||||
f"/Users/{user_id}/Items"
|
|
||||||
+ f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
|
|
||||||
"get",
|
|
||||||
identifiers=identifiers,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
for library in libraries:
|
|
||||||
if len(library["Items"]) == 0:
|
|
||||||
continue
|
|
||||||
|
|
||||||
library_id = library["Identifiers"]["library_id"]
|
|
||||||
library_title = library["Identifiers"]["library_title"]
|
|
||||||
# Get all library types excluding "Folder"
|
|
||||||
types = set(
|
|
||||||
[
|
|
||||||
x["Type"]
|
|
||||||
for x in library["Items"]
|
|
||||||
if x["Type"] in ["Movie", "Series", "Episode"]
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
skip_reason = check_skip_logic(
|
|
||||||
library_title,
|
|
||||||
types,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
)
|
|
||||||
|
|
||||||
if skip_reason:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Skipping library {library_title}: {skip_reason}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# If there are multiple types in library raise error
|
|
||||||
if types is None or len(types) < 1:
|
|
||||||
all_types = set([x["Type"] for x in library["Items"]])
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Skipping Library {library_title} found types: {types}, all types: {all_types}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
for library_type in types:
|
|
||||||
# Get watched for user
|
|
||||||
watched.append(
|
|
||||||
self.get_user_library_watched(
|
|
||||||
user_name,
|
|
||||||
user_id,
|
|
||||||
library_type,
|
|
||||||
library_id,
|
|
||||||
library_title,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
return watched
|
|
||||||
except Exception as e:
|
|
||||||
logger(f"Jellyfin: Failed to get users watched, Error: {e}", 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|
||||||
def get_watched(
|
|
||||||
self,
|
|
||||||
users,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping=None,
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
users_watched = {}
|
|
||||||
watched = []
|
|
||||||
|
|
||||||
for user_name, user_id in users.items():
|
|
||||||
watched.append(
|
|
||||||
self.get_users_watched(
|
|
||||||
user_name,
|
|
||||||
user_id,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
for user_watched in watched:
|
|
||||||
user_watched_combine = combine_watched_dicts(user_watched)
|
|
||||||
for user, user_watched_temp in user_watched_combine.items():
|
|
||||||
if user not in users_watched:
|
|
||||||
users_watched[user] = {}
|
|
||||||
users_watched[user].update(user_watched_temp)
|
|
||||||
|
|
||||||
return users_watched
|
|
||||||
except Exception as e:
|
|
||||||
logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|
||||||
def update_user_watched(
|
|
||||||
self, user_name, user_id, library, library_id, videos, dryrun
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Updating watched for {user_name} in library {library}", 1
|
|
||||||
)
|
|
||||||
(
|
|
||||||
videos_shows_ids,
|
|
||||||
videos_episodes_ids,
|
|
||||||
videos_movies_ids,
|
|
||||||
) = generate_library_guids_dict(videos)
|
|
||||||
|
|
||||||
if (
|
|
||||||
not videos_movies_ids
|
|
||||||
and not videos_shows_ids
|
|
||||||
and not videos_episodes_ids
|
|
||||||
):
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
return
|
|
||||||
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
if videos_movies_ids:
|
|
||||||
jellyfin_search = self.query(
|
|
||||||
f"/Users/{user_id}/Items"
|
|
||||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
|
||||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
for jellyfin_video in jellyfin_search["Items"]:
|
|
||||||
movie_status = get_video_status(
|
|
||||||
jellyfin_video, videos_movies_ids, videos
|
|
||||||
)
|
|
||||||
|
|
||||||
if movie_status:
|
|
||||||
jellyfin_video_id = jellyfin_video["Id"]
|
|
||||||
if movie_status["completed"]:
|
|
||||||
msg = f"Jellyfin: {jellyfin_video.get('Name')} as watched for {user_name} in {library}"
|
|
||||||
if not dryrun:
|
|
||||||
logger(msg, 5)
|
|
||||||
self.query(
|
|
||||||
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
|
||||||
"post",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user_name,
|
|
||||||
library,
|
|
||||||
jellyfin_video.get("Name"),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# TODO add support for partially watched movies
|
|
||||||
msg = f"Jellyfin: {jellyfin_video.get('Name')} as partially watched for {floor(movie_status['time'] / 60_000)} minutes for {user_name} in {library}"
|
|
||||||
"""
|
|
||||||
if not dryrun:
|
|
||||||
pass
|
|
||||||
# logger(msg, 5)
|
|
||||||
else:
|
|
||||||
pass
|
|
||||||
# logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user_name,
|
|
||||||
library,
|
|
||||||
jellyfin_video.get("Name"),
|
|
||||||
duration=floor(movie_status["time"] / 60_000),
|
|
||||||
)"""
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Skipping movie {jellyfin_video.get('Name')} as it is not in mark list for {user_name}",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
|
|
||||||
# TV Shows
|
|
||||||
if videos_shows_ids and videos_episodes_ids:
|
|
||||||
jellyfin_search = self.query(
|
|
||||||
f"/Users/{user_id}/Items"
|
|
||||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
|
||||||
+ "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
|
||||||
|
|
||||||
for jellyfin_show in jellyfin_shows:
|
|
||||||
show_found = False
|
|
||||||
episode_videos = []
|
|
||||||
|
|
||||||
if generate_locations:
|
|
||||||
if "Path" in jellyfin_show:
|
|
||||||
if (
|
|
||||||
contains_nested(
|
|
||||||
jellyfin_show["Path"].split("/")[-1],
|
|
||||||
videos_shows_ids["locations"],
|
|
||||||
)
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
show_found = True
|
|
||||||
for shows, seasons in videos.items():
|
|
||||||
show = {k: v for k, v in shows}
|
|
||||||
if (
|
|
||||||
contains_nested(
|
|
||||||
jellyfin_show["Path"].split("/")[-1],
|
|
||||||
show["locations"],
|
|
||||||
)
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
for season in seasons.values():
|
|
||||||
for episode in season:
|
|
||||||
episode_videos.append(episode)
|
|
||||||
|
|
||||||
break
|
|
||||||
|
|
||||||
if generate_guids:
|
|
||||||
if not show_found:
|
|
||||||
for show_provider_source, show_provider_id in jellyfin_show[
|
|
||||||
"ProviderIds"
|
|
||||||
].items():
|
|
||||||
if show_provider_source.lower() in videos_shows_ids:
|
|
||||||
if (
|
|
||||||
show_provider_id.lower()
|
|
||||||
in videos_shows_ids[
|
|
||||||
show_provider_source.lower()
|
|
||||||
]
|
|
||||||
):
|
|
||||||
show_found = True
|
|
||||||
for show, seasons in videos.items():
|
|
||||||
show = {k: v for k, v in show}
|
|
||||||
if show_provider_id.lower() in show.get(
|
|
||||||
show_provider_source.lower(), []
|
|
||||||
):
|
|
||||||
for season in seasons.values():
|
|
||||||
for episode in season:
|
|
||||||
episode_videos.append(episode)
|
|
||||||
|
|
||||||
break
|
|
||||||
|
|
||||||
if show_found:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show.get('Name')}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
jellyfin_show_id = jellyfin_show["Id"]
|
|
||||||
jellyfin_episodes = self.query(
|
|
||||||
f"/Shows/{jellyfin_show_id}/Episodes"
|
|
||||||
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
|
|
||||||
for jellyfin_episode in jellyfin_episodes["Items"]:
|
|
||||||
episode_status = get_video_status(
|
|
||||||
jellyfin_episode, videos_episodes_ids, episode_videos
|
|
||||||
)
|
|
||||||
|
|
||||||
if episode_status:
|
|
||||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
|
||||||
if episode_status["completed"]:
|
|
||||||
msg = (
|
|
||||||
f"Jellyfin: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
|
|
||||||
+ f" as watched for {user_name} in {library}"
|
|
||||||
)
|
|
||||||
if not dryrun:
|
|
||||||
logger(msg, 5)
|
|
||||||
self.query(
|
|
||||||
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
|
||||||
"post",
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user_name,
|
|
||||||
library,
|
|
||||||
jellyfin_episode.get("SeriesName"),
|
|
||||||
jellyfin_episode.get("Name"),
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# TODO add support for partially watched episodes
|
|
||||||
msg = (
|
|
||||||
f"Jellyfin: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
|
|
||||||
+ f" as partially watched for {floor(episode_status['time'] / 60_000)} minutes for {user_name} in {library}"
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
if not dryrun:
|
|
||||||
pass
|
|
||||||
# logger(f"Marked {msg}", 0)
|
|
||||||
else:
|
|
||||||
pass
|
|
||||||
# logger(f"Dryrun {msg}", 0)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user_name,
|
|
||||||
library,
|
|
||||||
jellyfin_episode.get("SeriesName"),
|
|
||||||
jellyfin_episode.get('Name'),
|
|
||||||
duration=floor(episode_status["time"] / 60_000),
|
|
||||||
)"""
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Skipping episode {jellyfin_episode.get('Name')} as it is not in mark list for {user_name}",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Skipping show {jellyfin_show.get('Name')} as it is not in mark list for {user_name}",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Error updating watched for {user_name} in library {library}, {e}",
|
|
||||||
2,
|
|
||||||
)
|
|
||||||
logger(traceback.format_exc(), 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|
||||||
def update_watched(
|
|
||||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
for user, libraries in watched_list.items():
|
|
||||||
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
|
||||||
user_other = None
|
|
||||||
user_name = None
|
|
||||||
if user_mapping:
|
|
||||||
if user in user_mapping.keys():
|
|
||||||
user_other = user_mapping[user]
|
|
||||||
elif user in user_mapping.values():
|
|
||||||
user_other = search_mapping(user_mapping, user)
|
|
||||||
|
|
||||||
user_id = None
|
|
||||||
for key in self.users:
|
|
||||||
if user.lower() == key.lower():
|
|
||||||
user_id = self.users[key]
|
|
||||||
user_name = key
|
|
||||||
break
|
|
||||||
elif user_other and user_other.lower() == key.lower():
|
|
||||||
user_id = self.users[key]
|
|
||||||
user_name = key
|
|
||||||
break
|
|
||||||
|
|
||||||
if not user_id:
|
|
||||||
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
|
||||||
continue
|
|
||||||
|
|
||||||
jellyfin_libraries = self.query(
|
|
||||||
f"/Users/{user_id}/Views",
|
|
||||||
"get",
|
|
||||||
)
|
|
||||||
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
|
||||||
|
|
||||||
for library, videos in libraries.items():
|
|
||||||
library_other = None
|
|
||||||
if library_mapping:
|
|
||||||
if library in library_mapping.keys():
|
|
||||||
library_other = library_mapping[library]
|
|
||||||
elif library in library_mapping.values():
|
|
||||||
library_other = search_mapping(library_mapping, library)
|
|
||||||
|
|
||||||
if library.lower() not in [
|
|
||||||
x["Name"].lower() for x in jellyfin_libraries
|
|
||||||
]:
|
|
||||||
if library_other:
|
|
||||||
if library_other.lower() in [
|
|
||||||
x["Name"].lower() for x in jellyfin_libraries
|
|
||||||
]:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
library = library_other
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Library {library} or {library_other} not found in library list",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Jellyfin: Library {library} not found in library list",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
library_id = None
|
|
||||||
for jellyfin_library in jellyfin_libraries:
|
|
||||||
if jellyfin_library["Name"] == library:
|
|
||||||
library_id = jellyfin_library["Id"]
|
|
||||||
continue
|
|
||||||
|
|
||||||
if library_id:
|
|
||||||
self.update_user_watched(
|
|
||||||
user_name, user_id, library, library_id, videos, dryrun
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
|
||||||
raise Exception(e)
|
|
||||||
|
|||||||
712
src/jellyfin_emby.py
Normal file
712
src/jellyfin_emby.py
Normal file
@@ -0,0 +1,712 @@
|
|||||||
|
# Functions for Jellyfin and Emby
|
||||||
|
|
||||||
|
import requests
|
||||||
|
import traceback
|
||||||
|
import os
|
||||||
|
from math import floor
|
||||||
|
from typing import Any, Literal
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from packaging.version import parse, Version
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from src.functions import (
|
||||||
|
search_mapping,
|
||||||
|
log_marked,
|
||||||
|
str_to_bool,
|
||||||
|
)
|
||||||
|
from src.watched import (
|
||||||
|
LibraryData,
|
||||||
|
MediaIdentifiers,
|
||||||
|
MediaItem,
|
||||||
|
WatchedStatus,
|
||||||
|
Series,
|
||||||
|
UserData,
|
||||||
|
check_same_identifiers,
|
||||||
|
)
|
||||||
|
|
||||||
|
load_dotenv(override=True)
|
||||||
|
|
||||||
|
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
||||||
|
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
|
||||||
|
|
||||||
|
|
||||||
|
def extract_identifiers_from_item(server_type, item: dict) -> MediaIdentifiers:
|
||||||
|
title = item.get("Name", None)
|
||||||
|
id = None
|
||||||
|
if not title:
|
||||||
|
id = item.get("Id")
|
||||||
|
logger.info(f"{server_type}: Name not found in {id}")
|
||||||
|
|
||||||
|
guids = {}
|
||||||
|
if generate_guids:
|
||||||
|
guids = {k.lower(): v for k, v in item["ProviderIds"].items()}
|
||||||
|
if not guids:
|
||||||
|
logger.info(
|
||||||
|
f"{server_type}: {title if title else id} has no guids",
|
||||||
|
)
|
||||||
|
|
||||||
|
locations = tuple()
|
||||||
|
if generate_locations:
|
||||||
|
if "Path" in item:
|
||||||
|
locations = tuple([item.get("Path").split("/")[-1]])
|
||||||
|
elif "MediaSources" in item:
|
||||||
|
locations = tuple(
|
||||||
|
[x["Path"].split("/")[-1] for x in item["MediaSources"] if "Path" in x]
|
||||||
|
)
|
||||||
|
|
||||||
|
if not locations:
|
||||||
|
logger.info(f"{server_type}: {title if title else id} has no locations")
|
||||||
|
|
||||||
|
return MediaIdentifiers(
|
||||||
|
title=title,
|
||||||
|
locations=locations,
|
||||||
|
imdb_id=guids.get("imdb", None),
|
||||||
|
tvdb_id=guids.get("tvdb", None),
|
||||||
|
tmdb_id=guids.get("tmdb", None),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_mediaitem(server_type, item: dict) -> MediaItem:
|
||||||
|
return MediaItem(
|
||||||
|
identifiers=extract_identifiers_from_item(server_type, item),
|
||||||
|
status=WatchedStatus(
|
||||||
|
completed=item["UserData"]["Played"],
|
||||||
|
time=floor(item["UserData"]["PlaybackPositionTicks"] / 10000),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class JellyfinEmby:
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
server_type: Literal["Jellyfin", "Emby"],
|
||||||
|
baseurl: str,
|
||||||
|
token: str,
|
||||||
|
headers: dict[str, str],
|
||||||
|
):
|
||||||
|
if server_type not in ["Jellyfin", "Emby"]:
|
||||||
|
raise Exception(f"Server type {server_type} not supported")
|
||||||
|
self.server_type = server_type
|
||||||
|
self.baseurl = baseurl
|
||||||
|
self.token = token
|
||||||
|
self.headers = headers
|
||||||
|
self.timeout = int(os.getenv("REQUEST_TIMEOUT", 300))
|
||||||
|
|
||||||
|
if not self.baseurl:
|
||||||
|
raise Exception(f"{self.server_type} baseurl not set")
|
||||||
|
|
||||||
|
if not self.token:
|
||||||
|
raise Exception(f"{self.server_type} token not set")
|
||||||
|
|
||||||
|
self.session = requests.Session()
|
||||||
|
self.users = self.get_users()
|
||||||
|
self.server_name = self.info(name_only=True)
|
||||||
|
|
||||||
|
def query(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
query_type: Literal["get", "post"],
|
||||||
|
identifiers: dict[str, str] | None = None,
|
||||||
|
json: dict[str, float] | None = None,
|
||||||
|
) -> dict[str, Any] | list[dict[str, Any]] | None:
|
||||||
|
try:
|
||||||
|
results: (
|
||||||
|
dict[str, list[Any] | dict[str, str]] | list[dict[str, Any]] | None
|
||||||
|
) = None
|
||||||
|
|
||||||
|
if query_type == "get":
|
||||||
|
response = self.session.get(
|
||||||
|
self.baseurl + query, headers=self.headers, timeout=self.timeout
|
||||||
|
)
|
||||||
|
if response.status_code not in [200, 204]:
|
||||||
|
raise Exception(
|
||||||
|
f"Query failed with status {response.status_code} {response.reason}"
|
||||||
|
)
|
||||||
|
if response.status_code == 204:
|
||||||
|
results = None
|
||||||
|
else:
|
||||||
|
results = response.json()
|
||||||
|
|
||||||
|
elif query_type == "post":
|
||||||
|
response = self.session.post(
|
||||||
|
self.baseurl + query,
|
||||||
|
headers=self.headers,
|
||||||
|
json=json,
|
||||||
|
timeout=self.timeout,
|
||||||
|
)
|
||||||
|
if response.status_code not in [200, 204]:
|
||||||
|
raise Exception(
|
||||||
|
f"Query failed with status {response.status_code} {response.reason}"
|
||||||
|
)
|
||||||
|
if response.status_code == 204:
|
||||||
|
results = None
|
||||||
|
else:
|
||||||
|
results = response.json()
|
||||||
|
|
||||||
|
if results is not None:
|
||||||
|
if not isinstance(results, list) and not isinstance(results, dict):
|
||||||
|
raise Exception("Query result is not of type list or dict")
|
||||||
|
|
||||||
|
# append identifiers to results
|
||||||
|
if identifiers and results:
|
||||||
|
results["Identifiers"] = identifiers
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"{self.server_type}: Query {query_type} {query}\nResults {results}\n{e}",
|
||||||
|
)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def info(
|
||||||
|
self, name_only: bool = False, version_only: bool = False
|
||||||
|
) -> str | Version | None:
|
||||||
|
try:
|
||||||
|
query_string = "/System/Info/Public"
|
||||||
|
|
||||||
|
response: dict[str, Any] = self.query(query_string, "get")
|
||||||
|
|
||||||
|
if response:
|
||||||
|
if name_only:
|
||||||
|
return response["ServerName"]
|
||||||
|
elif version_only:
|
||||||
|
return parse(response["Version"])
|
||||||
|
|
||||||
|
return f"{self.server_type} {response.get('ServerName')}: {response.get('Version')}"
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{self.server_type}: Get server name failed {e}")
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def get_users(self) -> dict[str, str]:
|
||||||
|
try:
|
||||||
|
users: dict[str, str] = {}
|
||||||
|
|
||||||
|
query_string = "/Users"
|
||||||
|
response: list[dict[str, str | bool]] = self.query(query_string, "get")
|
||||||
|
|
||||||
|
# If response is not empty
|
||||||
|
if response:
|
||||||
|
for user in response:
|
||||||
|
if isinstance(user["Name"], str) and isinstance(user["Id"], str):
|
||||||
|
users[user["Name"]] = user["Id"]
|
||||||
|
|
||||||
|
return users
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{self.server_type}: Get users failed {e}")
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def get_libraries(self) -> dict[str, str]:
|
||||||
|
try:
|
||||||
|
libraries = {}
|
||||||
|
|
||||||
|
# Theres no way to get all libraries so individually get list of libraries from all users
|
||||||
|
users = self.get_users()
|
||||||
|
|
||||||
|
for user_name, user_id in users.items():
|
||||||
|
user_libraries: dict = self.query(f"/Users/{user_id}/Views", "get")
|
||||||
|
logger.debug(f"{self.server_type}: All Libraries for {user_name} {[library.get("Name") for library in user_libraries["Items"]]}")
|
||||||
|
|
||||||
|
for library in user_libraries["Items"]:
|
||||||
|
library_title = library["Name"]
|
||||||
|
library_type = library.get("CollectionType")
|
||||||
|
|
||||||
|
if library_type not in ["movies", "tvshows"]:
|
||||||
|
logger.debug(
|
||||||
|
f"{self.server_type}: Skipping Library {library_title} found type {library_type}",
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
libraries[library_title] = library_type
|
||||||
|
|
||||||
|
return libraries
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"{self.server_type}: Get libraries failed {e}")
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def get_user_library_watched(
    self, user_name, user_id, library_type, library_id, library_title
) -> LibraryData:
    """Collect watched/in-progress items for one user in one library.

    Args:
        user_name: User display name (lowercased for logging).
        user_id: Jellyfin user id to query on behalf of.
        library_type: Item type driving the scan: "Movie" for movie
            libraries, "Series"/"Episode" for TV libraries.
        library_id: Jellyfin id of the library to scan.
        library_title: Title stored on the returned LibraryData.

    Returns:
        LibraryData populated with watched movies and/or series. On error,
        an empty LibraryData is returned (bug fix: previously ``{}`` was
        returned, violating the declared return type and breaking callers
        that access ``.movies``/``.series``).
    """
    user_name = user_name.lower()
    try:
        logger.info(
            f"{self.server_type}: Generating watched for {user_name} in library {library_title}",
        )
        watched = LibraryData(title=library_title)

        # Movies
        if library_type == "Movie":
            # Fully played movies plus resumable (partially watched) ones.
            watched_items = self.query(
                f"/Users/{user_id}/Items"
                + f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
                "get",
            ).get("Items", [])

            in_progress_items = self.query(
                f"/Users/{user_id}/Items"
                + f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
                "get",
            ).get("Items", [])

            for movie in watched_items + in_progress_items:
                # Skip if there's no user data, which means the movie has
                # not been watched.
                if "UserData" not in movie:
                    continue

                # Skip if there's no media tied to the movie.
                if "MediaSources" not in movie or movie["MediaSources"] == {}:
                    continue

                # Keep when fully played or played for more than a minute
                # (600000000 ticks of 100 ns == 60 s).
                if (
                    movie["UserData"]["Played"]
                    or movie["UserData"]["PlaybackPositionTicks"] > 600000000
                ):
                    watched.movies.append(get_mediaitem(self.server_type, movie))

        # TV Shows
        if library_type in ["Series", "Episode"]:
            # Retrieve a list of watched TV shows.
            watched_shows = self.query(
                f"/Users/{user_id}/Items"
                + f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
                "get",
            ).get("Items", [])

            # Keep only shows that have been partially or fully watched.
            watched_shows_filtered = []
            for show in watched_shows:
                if "UserData" not in show:
                    continue

                if "PlayedPercentage" in show["UserData"]:
                    if show["UserData"]["PlayedPercentage"] > 0:
                        watched_shows_filtered.append(show)

            # Retrieve the watched/partially watched episodes of each show.
            for show in watched_shows_filtered:
                show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
                # Last path component serves as the show's location identifier.
                show_locations = (
                    tuple([show["Path"].split("/")[-1]])
                    if "Path" in show
                    else tuple()
                )

                show_episodes = self.query(
                    f"/Shows/{show['Id']}/Episodes"
                    + f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,MediaSources",
                    "get",
                ).get("Items", [])

                # Collect the qualifying episodes of this show.
                episode_mediaitem = []
                for episode in show_episodes:
                    if "UserData" not in episode:
                        continue

                    if (
                        "MediaSources" not in episode
                        or episode["MediaSources"] == {}
                    ):
                        continue

                    # If watched, or watched for more than a minute.
                    if (
                        episode["UserData"]["Played"]
                        or episode["UserData"]["PlaybackPositionTicks"] > 600000000
                    ):
                        episode_mediaitem.append(
                            get_mediaitem(self.server_type, episode)
                        )

                if episode_mediaitem:
                    watched.series.append(
                        Series(
                            identifiers=MediaIdentifiers(
                                title=show.get("Name"),
                                locations=show_locations,
                                imdb_id=show_guids.get("imdb", None),
                                tvdb_id=show_guids.get("tvdb", None),
                                tmdb_id=show_guids.get("tmdb", None),
                            ),
                            episodes=episode_mediaitem,
                        )
                    )

        logger.info(
            f"{self.server_type}: Finished getting watched for {user_name} in library {library_title}",
        )

        return watched
    except Exception as e:
        logger.error(
            f"{self.server_type}: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
        )

        logger.error(traceback.format_exc())
        # Bug fix: return an empty LibraryData matching the annotated return
        # type, instead of {} which has neither .movies nor .series.
        return LibraryData(title=library_title)
|
||||||
|
|
||||||
|
def get_watched(
    self, users: dict[str, str], sync_libraries: list[str]
) -> dict[str, UserData]:
    """Build the watched state of every given user across the requested libraries.

    Args:
        users: Mapping of user display name -> Jellyfin user id.
        sync_libraries: Titles of the libraries that should be synced.

    Returns:
        Mapping of lowercased user name -> UserData holding one LibraryData
        per processed library title.

    Raises:
        Exception: wraps any underlying query failure.
    """
    try:
        users_watched: dict[str, UserData] = {}

        for user_name, user_id in users.items():
            libraries = []

            # All views (libraries) visible to this user.
            all_libraries = self.query(f"/Users/{user_id}/Views", "get")
            for library in all_libraries["Items"]:
                library_id = library["Id"]
                library_title = library["Name"]

                # Only process libraries the caller asked to sync.
                if library_title not in sync_libraries:
                    continue

                # Threaded through the query so the library id/title can be
                # recovered from each response while iterating below.
                identifiers: dict[str, str] = {
                    "library_id": library_id,
                    "library_title": library_title,
                }
                libraries.append(
                    self.query(
                        f"/Users/{user_id}/Items"
                        + f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
                        "get",
                        identifiers=identifiers,
                    )
                )

            for library in libraries:
                # Nothing played in this library; skip it entirely.
                if len(library["Items"]) == 0:
                    continue

                library_id: str = library["Identifiers"]["library_id"]
                library_title: str = library["Identifiers"]["library_title"]

                # Get all library types excluding "Folder".
                types = set(
                    [
                        x["Type"]
                        for x in library["Items"]
                        if x["Type"] in ["Movie", "Series", "Episode"]
                    ]
                )

                for library_type in types:
                    # Get watched for user.
                    library_data = self.get_user_library_watched(
                        user_name,
                        user_id,
                        library_type,
                        library_id,
                        library_title,
                    )

                    if user_name.lower() not in users_watched:
                        users_watched[user_name.lower()] = UserData()

                    # NOTE(review): if a library yields both Movie and Series
                    # types, the later iteration overwrites the earlier entry
                    # for this title — confirm this is intended.
                    users_watched[user_name.lower()].libraries[library_title] = (
                        library_data
                    )

        return users_watched
    except Exception as e:
        logger.error(f"{self.server_type}: Failed to get watched, Error: {e}")
        raise Exception(e)
|
||||||
|
|
||||||
|
def update_user_watched(
    self,
    user_name: str,
    user_id: str,
    library_data: LibraryData,
    library_name: str,
    library_id: str,
    update_partial: bool,
    dryrun: bool,
):
    """Push stored watch state for one user/library to the server.

    Marks matching movies/episodes as played, or — when ``update_partial``
    is True — sets their playback position instead. ``dryrun`` logs what
    would happen without issuing any POST requests.

    Args:
        user_name: Display name, used for logging.
        user_id: Jellyfin user id whose state is updated.
        library_data: Stored watch state (movies and series) to replicate.
        library_name: Title of the target library (logging only).
        library_id: Jellyfin id of the target library.
        update_partial: Whether the server supports position updates.
        dryrun: If True, log intended changes but do not modify the server.

    Raises:
        Exception: wraps any error raised while querying/updating the server.
    """
    try:
        # If there are no movies or shows to update, exit early.
        if not library_data.series and not library_data.movies:
            return

        logger.info(
            f"{self.server_type}: Updating watched for {user_name} in library {library_name}",
        )

        # Update movies.
        if library_data.movies:
            # Only unplayed movies are candidates for marking.
            jellyfin_search = self.query(
                f"/Users/{user_id}/Items"
                + f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
                + "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
                "get",
            )
            for jellyfin_video in jellyfin_search["Items"]:
                jelly_identifiers = extract_identifiers_from_item(
                    self.server_type, jellyfin_video
                )
                # Check each stored movie for a match.
                for stored_movie in library_data.movies:
                    if check_same_identifiers(
                        jelly_identifiers, stored_movie.identifiers
                    ):
                        jellyfin_video_id = jellyfin_video["Id"]
                        if stored_movie.status.completed:
                            msg = f"{self.server_type}: {jellyfin_video.get('Name')} as watched for {user_name} in {library_name}"
                            if not dryrun:
                                self.query(
                                    f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
                                    "post",
                                )

                            logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                            log_marked(
                                self.server_type,
                                self.server_name,
                                user_name,
                                library_name,
                                jellyfin_video.get("Name"),
                            )
                        elif update_partial:
                            # Stored time is in ms; Jellyfin ticks are 100 ns,
                            # hence the * 10_000 conversion below.
                            msg = f"{self.server_type}: {jellyfin_video.get('Name')} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user_name} in {library_name}"

                            if not dryrun:
                                playback_position_payload = {
                                    "PlaybackPositionTicks": stored_movie.status.time
                                    * 10_000,
                                }
                                self.query(
                                    f"/Users/{user_id}/Items/{jellyfin_video_id}/UserData",
                                    "post",
                                    json=playback_position_payload,
                                )

                            logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
                            log_marked(
                                self.server_type,
                                self.server_name,
                                user_name,
                                library_name,
                                jellyfin_video.get("Name"),
                                duration=floor(stored_movie.status.time / 60_000),
                            )
                    else:
                        logger.trace(
                            f"{self.server_type}: Skipping movie {jellyfin_video.get('Name')} as it is not in mark list for {user_name}",
                        )

        # Update TV Shows (series/episodes).
        if library_data.series:
            jellyfin_search = self.query(
                f"/Users/{user_id}/Items"
                + f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
                + "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
                "get",
            )
            jellyfin_shows = [x for x in jellyfin_search["Items"]]

            for jellyfin_show in jellyfin_shows:
                jellyfin_show_identifiers = extract_identifiers_from_item(
                    self.server_type, jellyfin_show
                )
                # Try to find a matching series in the stored library.
                for stored_series in library_data.series:
                    if check_same_identifiers(
                        jellyfin_show_identifiers, stored_series.identifiers
                    ):
                        logger.info(
                            f"Found matching show for '{jellyfin_show.get('Name')}'",
                        )
                        # Now update episodes.
                        # Get the list of Jellyfin episodes for this show.
                        jellyfin_show_id = jellyfin_show["Id"]
                        jellyfin_episodes = self.query(
                            f"/Shows/{jellyfin_show_id}/Episodes"
                            + f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
                            "get",
                        )

                        for jellyfin_episode in jellyfin_episodes["Items"]:
                            jellyfin_episode_identifiers = (
                                extract_identifiers_from_item(
                                    self.server_type, jellyfin_episode
                                )
                            )
                            for stored_ep in stored_series.episodes:
                                if check_same_identifiers(
                                    jellyfin_episode_identifiers,
                                    stored_ep.identifiers,
                                ):
                                    jellyfin_episode_id = jellyfin_episode["Id"]
                                    if stored_ep.status.completed:
                                        msg = (
                                            f"{self.server_type}: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
                                            + f" as watched for {user_name} in {library_name}"
                                        )
                                        if not dryrun:
                                            self.query(
                                                f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
                                                "post",
                                            )

                                        logger.success(
                                            f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                        )
                                        log_marked(
                                            self.server_type,
                                            self.server_name,
                                            user_name,
                                            library_name,
                                            jellyfin_episode.get("SeriesName"),
                                            jellyfin_episode.get("Name"),
                                        )
                                    elif update_partial:
                                        msg = (
                                            f"{self.server_type}: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
                                            + f" as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user_name} in {library_name}"
                                        )

                                        if not dryrun:
                                            # ms -> 100 ns ticks conversion.
                                            playback_position_payload = {
                                                "PlaybackPositionTicks": stored_ep.status.time
                                                * 10_000,
                                            }
                                            self.query(
                                                f"/Users/{user_id}/Items/{jellyfin_episode_id}/UserData",
                                                "post",
                                                json=playback_position_payload,
                                            )

                                        logger.success(
                                            f"{'[DRYRUN] ' if dryrun else ''}{msg}"
                                        )
                                        log_marked(
                                            self.server_type,
                                            self.server_name,
                                            user_name,
                                            library_name,
                                            jellyfin_episode.get("SeriesName"),
                                            jellyfin_episode.get("Name"),
                                            duration=floor(
                                                stored_ep.status.time / 60_000
                                            ),
                                        )
                                else:
                                    logger.trace(
                                        f"{self.server_type}: Skipping episode {jellyfin_episode.get('Name')} as it is not in mark list for {user_name}",
                                    )
                    else:
                        logger.trace(
                            f"{self.server_type}: Skipping show {jellyfin_show.get('Name')} as it is not in mark list for {user_name}",
                        )

    except Exception as e:
        logger.error(
            f"{self.server_type}: Error updating watched for {user_name} in library {library_name}, {e}",
        )
        logger.error(traceback.format_exc())
        raise Exception(e)
|
||||||
|
|
||||||
|
def update_watched(
    self,
    watched_list: dict[str, UserData],
    user_mapping=None,
    library_mapping=None,
    dryrun=False,
):
    """Replicate the given watched state onto this server for every user.

    Resolves user and library names through the optional mapping dicts
    (checked in both directions), then delegates per-library updates to
    ``update_user_watched``.

    Args:
        watched_list: Mapping of user name -> UserData to replicate.
        user_mapping: Optional name translation between the two servers.
        library_mapping: Optional library-title translation.
        dryrun: If True, log intended changes without modifying the server.

    Raises:
        Exception: wraps any underlying failure.
    """
    try:
        server_version = self.info(version_only=True)
        # Playback-position updates are only possible on new enough servers.
        update_partial = self.is_partial_update_supported(server_version)

        if not update_partial:
            logger.info(
                f"{self.server_type}: Server version {server_version} does not support updating playback position.",
            )

        for user, user_data in watched_list.items():
            user_other = None
            user_name = None
            # Resolve the user's alternate name via the mapping, whichever
            # direction it was declared in.
            if user_mapping:
                if user in user_mapping.keys():
                    user_other = user_mapping[user]
                elif user in user_mapping.values():
                    user_other = search_mapping(user_mapping, user)

            # Case-insensitive lookup of the user (or its mapped alias)
            # among this server's known users.
            user_id = None
            for key in self.users:
                if user.lower() == key.lower():
                    user_id = self.users[key]
                    user_name = key
                    break
                elif user_other and user_other.lower() == key.lower():
                    user_id = self.users[key]
                    user_name = key
                    break

            if not user_id:
                logger.info(f"{user} {user_other} not found in Jellyfin")
                continue

            jellyfin_libraries = self.query(
                f"/Users/{user_id}/Views",
                "get",
            )
            jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]

            for library_name in user_data.libraries:
                library_data = user_data.libraries[library_name]
                library_other = None
                # Same two-way resolution for library titles.
                if library_mapping:
                    if library_name in library_mapping.keys():
                        library_other = library_mapping[library_name]
                    elif library_name in library_mapping.values():
                        library_other = search_mapping(
                            library_mapping, library_name
                        )

                # Fall back to the mapped title when the original title is
                # not present on this server.
                if library_name.lower() not in [
                    x["Name"].lower() for x in jellyfin_libraries
                ]:
                    if library_other:
                        if library_other.lower() in [
                            x["Name"].lower() for x in jellyfin_libraries
                        ]:
                            logger.info(
                                f"{self.server_type}: Library {library_name} not found, but {library_other} found, using {library_other}",
                            )
                            library_name = library_other
                        else:
                            logger.info(
                                f"{self.server_type}: Library {library_name} or {library_other} not found in library list",
                            )
                            continue
                    else:
                        logger.info(
                            f"{self.server_type}: Library {library_name} not found in library list",
                        )
                        continue

                # Find the library id by exact (case-sensitive) title match.
                library_id = None
                for jellyfin_library in jellyfin_libraries:
                    if jellyfin_library["Name"] == library_name:
                        library_id = jellyfin_library["Id"]
                        # NOTE(review): `continue` merely moves to the next
                        # view; a `break` would be equivalent here and stop
                        # the scan earlier.
                        continue

                if library_id:
                    self.update_user_watched(
                        user_name,
                        user_id,
                        library_data,
                        library_name,
                        library_id,
                        update_partial,
                        dryrun,
                    )

    except Exception as e:
        logger.error(f"{self.server_type}: Error updating watched, {e}")
        raise Exception(e)
|
||||||
239
src/library.py
239
src/library.py
@@ -1,18 +1,20 @@
|
|||||||
|
from loguru import logger
|
||||||
|
|
||||||
from src.functions import (
|
from src.functions import (
|
||||||
logger,
|
match_list,
|
||||||
search_mapping,
|
search_mapping,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def check_skip_logic(
|
def check_skip_logic(
|
||||||
library_title,
|
library_title: str,
|
||||||
library_type,
|
library_type: str,
|
||||||
blacklist_library,
|
blacklist_library: list[str],
|
||||||
whitelist_library,
|
whitelist_library: list[str],
|
||||||
blacklist_library_type,
|
blacklist_library_type: list[str],
|
||||||
whitelist_library_type,
|
whitelist_library_type: list[str],
|
||||||
library_mapping=None,
|
library_mapping: dict[str, str] | None = None,
|
||||||
):
|
) -> str | None:
|
||||||
skip_reason = None
|
skip_reason = None
|
||||||
library_other = None
|
library_other = None
|
||||||
if library_mapping:
|
if library_mapping:
|
||||||
@@ -47,11 +49,11 @@ def check_skip_logic(
|
|||||||
|
|
||||||
|
|
||||||
def check_blacklist_logic(
|
def check_blacklist_logic(
|
||||||
library_title,
|
library_title: str,
|
||||||
library_type,
|
library_type: str,
|
||||||
blacklist_library,
|
blacklist_library: list[str],
|
||||||
blacklist_library_type,
|
blacklist_library_type: list[str],
|
||||||
library_other=None,
|
library_other: str | None = None,
|
||||||
):
|
):
|
||||||
skip_reason = None
|
skip_reason = None
|
||||||
if isinstance(library_type, (list, tuple, set)):
|
if isinstance(library_type, (list, tuple, set)):
|
||||||
@@ -83,11 +85,11 @@ def check_blacklist_logic(
|
|||||||
|
|
||||||
|
|
||||||
def check_whitelist_logic(
|
def check_whitelist_logic(
|
||||||
library_title,
|
library_title: str,
|
||||||
library_type,
|
library_type: str,
|
||||||
whitelist_library,
|
whitelist_library: list[str],
|
||||||
whitelist_library_type,
|
whitelist_library_type: list[str],
|
||||||
library_other=None,
|
library_other: str | None = None,
|
||||||
):
|
):
|
||||||
skip_reason = None
|
skip_reason = None
|
||||||
if len(whitelist_library_type) > 0:
|
if len(whitelist_library_type) > 0:
|
||||||
@@ -129,150 +131,73 @@ def check_whitelist_logic(
|
|||||||
return skip_reason
|
return skip_reason
|
||||||
|
|
||||||
|
|
||||||
def show_title_dict(user_list: dict):
|
def filter_libaries(
|
||||||
try:
|
server_libraries: dict[str, str],
|
||||||
show_output_dict = {}
|
blacklist_library: list[str],
|
||||||
show_output_dict["locations"] = []
|
blacklist_library_type: list[str],
|
||||||
show_counter = 0 # Initialize a counter for the current show position
|
whitelist_library: list[str],
|
||||||
|
whitelist_library_type: list[str],
|
||||||
|
library_mapping: dict[str, str] | None = None,
|
||||||
|
) -> list[str]:
|
||||||
|
filtered_libaries: list[str] = []
|
||||||
|
for library in server_libraries:
|
||||||
|
skip_reason = check_skip_logic(
|
||||||
|
library,
|
||||||
|
server_libraries[library],
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
show_output_keys = user_list.keys()
|
if skip_reason:
|
||||||
show_output_keys = [dict(x) for x in list(show_output_keys)]
|
logger.info(f"Skipping library {library}: {skip_reason}")
|
||||||
for show_key in show_output_keys:
|
continue
|
||||||
for provider_key, provider_value in show_key.items():
|
|
||||||
# Skip title
|
|
||||||
if provider_key.lower() == "title":
|
|
||||||
continue
|
|
||||||
if provider_key.lower() not in show_output_dict:
|
|
||||||
show_output_dict[provider_key.lower()] = [None] * show_counter
|
|
||||||
if provider_key.lower() == "locations":
|
|
||||||
show_output_dict[provider_key.lower()].append(provider_value)
|
|
||||||
else:
|
|
||||||
show_output_dict[provider_key.lower()].append(
|
|
||||||
provider_value.lower()
|
|
||||||
)
|
|
||||||
|
|
||||||
show_counter += 1
|
filtered_libaries.append(library)
|
||||||
for key in show_output_dict:
|
|
||||||
if len(show_output_dict[key]) < show_counter:
|
|
||||||
show_output_dict[key].append(None)
|
|
||||||
|
|
||||||
return show_output_dict
|
return filtered_libaries
|
||||||
except Exception:
|
|
||||||
logger("Skipping show_output_dict ", 1)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def episode_title_dict(user_list: dict):
|
def setup_libraries(
|
||||||
try:
|
server_1,
|
||||||
episode_output_dict = {}
|
server_2,
|
||||||
episode_output_dict["completed"] = []
|
blacklist_library: list[str],
|
||||||
episode_output_dict["time"] = []
|
blacklist_library_type: list[str],
|
||||||
episode_output_dict["locations"] = []
|
whitelist_library: list[str],
|
||||||
episode_output_dict["show"] = []
|
whitelist_library_type: list[str],
|
||||||
episode_output_dict["season"] = []
|
library_mapping: dict[str, str] | None = None,
|
||||||
episode_counter = 0 # Initialize a counter for the current episode position
|
) -> tuple[list[str], list[str]]:
|
||||||
|
server_1_libraries = server_1.get_libraries()
|
||||||
|
server_2_libraries = server_2.get_libraries()
|
||||||
|
|
||||||
# Iterate through the shows, seasons, and episodes in user_list
|
logger.debug(f"{server_1.server_type}: Libraries and types {server_1_libraries}")
|
||||||
for show in user_list:
|
logger.debug(f"{server_2.server_type}: Libraries and types {server_2_libraries}")
|
||||||
for season in user_list[show]:
|
|
||||||
for episode in user_list[show][season]:
|
|
||||||
# Add the show title to the episode_output_dict if it doesn't exist
|
|
||||||
if "show" not in episode_output_dict:
|
|
||||||
episode_output_dict["show"] = [None] * episode_counter
|
|
||||||
|
|
||||||
# Add the season number to the episode_output_dict if it doesn't exist
|
# Filter out all blacklist, whitelist libaries
|
||||||
if "season" not in episode_output_dict:
|
filtered_server_1_libraries = filter_libaries(
|
||||||
episode_output_dict["season"] = [None] * episode_counter
|
server_1_libraries,
|
||||||
|
blacklist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
filtered_server_2_libraries = filter_libaries(
|
||||||
|
server_2_libraries,
|
||||||
|
blacklist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
# Add the show title to the episode_output_dict
|
output_server_1_libaries = match_list(
|
||||||
episode_output_dict["show"].append(dict(show))
|
filtered_server_1_libraries, filtered_server_2_libraries, library_mapping
|
||||||
|
)
|
||||||
|
output_server_2_libaries = match_list(
|
||||||
|
filtered_server_2_libraries, filtered_server_1_libraries, library_mapping
|
||||||
|
)
|
||||||
|
|
||||||
# Add the season number to the episode_output_dict
|
return output_server_1_libaries, output_server_2_libaries
|
||||||
episode_output_dict["season"].append(season)
|
|
||||||
|
|
||||||
# Iterate through the keys and values in each episode
|
|
||||||
for episode_key, episode_value in episode.items():
|
|
||||||
# If the key is not "status", add the key to episode_output_dict if it doesn't exist
|
|
||||||
if episode_key != "status":
|
|
||||||
if episode_key.lower() not in episode_output_dict:
|
|
||||||
# Initialize the list with None values up to the current episode position
|
|
||||||
episode_output_dict[episode_key.lower()] = [
|
|
||||||
None
|
|
||||||
] * episode_counter
|
|
||||||
|
|
||||||
# If the key is "locations", append each location to the list
|
|
||||||
if episode_key == "locations":
|
|
||||||
episode_output_dict[episode_key.lower()].append(
|
|
||||||
episode_value
|
|
||||||
)
|
|
||||||
|
|
||||||
# If the key is "status", append the "completed" and "time" values
|
|
||||||
elif episode_key == "status":
|
|
||||||
episode_output_dict["completed"].append(
|
|
||||||
episode_value["completed"]
|
|
||||||
)
|
|
||||||
episode_output_dict["time"].append(episode_value["time"])
|
|
||||||
|
|
||||||
# For other keys, append the value to the list
|
|
||||||
else:
|
|
||||||
episode_output_dict[episode_key.lower()].append(
|
|
||||||
episode_value.lower()
|
|
||||||
)
|
|
||||||
|
|
||||||
# Increment the episode_counter
|
|
||||||
episode_counter += 1
|
|
||||||
|
|
||||||
# Extend the lists in episode_output_dict with None values to match the current episode_counter
|
|
||||||
for key in episode_output_dict:
|
|
||||||
if len(episode_output_dict[key]) < episode_counter:
|
|
||||||
episode_output_dict[key].append(None)
|
|
||||||
|
|
||||||
return episode_output_dict
|
|
||||||
except Exception:
|
|
||||||
logger("Skipping episode_output_dict", 1)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def movies_title_dict(user_list: dict):
|
|
||||||
try:
|
|
||||||
movies_output_dict = {}
|
|
||||||
movies_output_dict["completed"] = []
|
|
||||||
movies_output_dict["time"] = []
|
|
||||||
movies_output_dict["locations"] = []
|
|
||||||
movie_counter = 0 # Initialize a counter for the current movie position
|
|
||||||
|
|
||||||
for movie in user_list:
|
|
||||||
for movie_key, movie_value in movie.items():
|
|
||||||
if movie_key != "status":
|
|
||||||
if movie_key.lower() not in movies_output_dict:
|
|
||||||
movies_output_dict[movie_key.lower()] = []
|
|
||||||
|
|
||||||
if movie_key == "locations":
|
|
||||||
movies_output_dict[movie_key.lower()].append(movie_value)
|
|
||||||
elif movie_key == "status":
|
|
||||||
movies_output_dict["completed"].append(movie_value["completed"])
|
|
||||||
movies_output_dict["time"].append(movie_value["time"])
|
|
||||||
else:
|
|
||||||
movies_output_dict[movie_key.lower()].append(movie_value.lower())
|
|
||||||
|
|
||||||
movie_counter += 1
|
|
||||||
for key in movies_output_dict:
|
|
||||||
if len(movies_output_dict[key]) < movie_counter:
|
|
||||||
movies_output_dict[key].append(None)
|
|
||||||
|
|
||||||
return movies_output_dict
|
|
||||||
except Exception:
|
|
||||||
logger("Skipping movies_output_dict failed", 1)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_library_guids_dict(user_list: dict):
|
|
||||||
# Handle the case where user_list is empty or does not contain the expected keys and values
|
|
||||||
if not user_list:
|
|
||||||
return {}, {}, {}
|
|
||||||
|
|
||||||
show_output_dict = show_title_dict(user_list)
|
|
||||||
episode_output_dict = episode_title_dict(user_list)
|
|
||||||
movies_output_dict = movies_title_dict(user_list)
|
|
||||||
|
|
||||||
return show_output_dict, episode_output_dict, movies_output_dict
|
|
||||||
|
|||||||
417
src/main.py
417
src/main.py
@@ -1,288 +1,142 @@
|
|||||||
import os, traceback, json
|
import os
|
||||||
|
import traceback
|
||||||
|
import json
|
||||||
|
import sys
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
from time import sleep, perf_counter
|
from time import sleep, perf_counter
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
|
from src.emby import Emby
|
||||||
|
from src.jellyfin import Jellyfin
|
||||||
|
from src.plex import Plex
|
||||||
|
from src.library import setup_libraries
|
||||||
from src.functions import (
|
from src.functions import (
|
||||||
logger,
|
parse_string_to_list,
|
||||||
str_to_bool,
|
str_to_bool,
|
||||||
)
|
)
|
||||||
from src.users import (
|
from src.users import setup_users
|
||||||
generate_user_list,
|
|
||||||
combine_user_lists,
|
|
||||||
filter_user_lists,
|
|
||||||
generate_server_users,
|
|
||||||
)
|
|
||||||
from src.watched import (
|
from src.watched import (
|
||||||
cleanup_watched,
|
cleanup_watched,
|
||||||
)
|
)
|
||||||
from src.black_white import setup_black_white_lists
|
from src.black_white import setup_black_white_lists
|
||||||
|
from src.connection import generate_server_connections
|
||||||
from src.plex import Plex
|
|
||||||
from src.jellyfin import Jellyfin
|
|
||||||
|
|
||||||
load_dotenv(override=True)
|
load_dotenv(override=True)
|
||||||
|
|
||||||
|
log_file = os.getenv("LOG_FILE", os.getenv("LOGFILE", "log.log"))
|
||||||
def setup_users(
|
level = os.getenv("DEBUG_LEVEL", "INFO").upper()
|
||||||
server_1, server_2, blacklist_users, whitelist_users, user_mapping=None
|
|
||||||
):
|
|
||||||
server_1_users = generate_user_list(server_1)
|
|
||||||
server_2_users = generate_user_list(server_2)
|
|
||||||
logger(f"Server 1 users: {server_1_users}", 1)
|
|
||||||
logger(f"Server 2 users: {server_2_users}", 1)
|
|
||||||
|
|
||||||
users = combine_user_lists(server_1_users, server_2_users, user_mapping)
|
|
||||||
logger(f"User list that exist on both servers {users}", 1)
|
|
||||||
|
|
||||||
users_filtered = filter_user_lists(users, blacklist_users, whitelist_users)
|
|
||||||
logger(f"Filtered user list {users_filtered}", 1)
|
|
||||||
|
|
||||||
output_server_1_users = generate_server_users(server_1, users_filtered)
|
|
||||||
output_server_2_users = generate_server_users(server_2, users_filtered)
|
|
||||||
|
|
||||||
# Check if users is none or empty
|
|
||||||
if output_server_1_users is None or len(output_server_1_users) == 0:
|
|
||||||
logger(
|
|
||||||
f"No users found for server 1 {server_1[0]}, users: {server_1_users}, overlapping users {users}, filtered users {users_filtered}, server 1 users {server_1[1].users}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if output_server_2_users is None or len(output_server_2_users) == 0:
|
|
||||||
logger(
|
|
||||||
f"No users found for server 2 {server_2[0]}, users: {server_2_users}, overlapping users {users} filtered users {users_filtered}, server 2 users {server_2[1].users}"
|
|
||||||
)
|
|
||||||
|
|
||||||
if (
|
|
||||||
output_server_1_users is None
|
|
||||||
or len(output_server_1_users) == 0
|
|
||||||
or output_server_2_users is None
|
|
||||||
or len(output_server_2_users) == 0
|
|
||||||
):
|
|
||||||
raise Exception("No users found for one or both servers")
|
|
||||||
|
|
||||||
logger(f"Server 1 users: {output_server_1_users}", 1)
|
|
||||||
logger(f"Server 2 users: {output_server_2_users}", 1)
|
|
||||||
|
|
||||||
return output_server_1_users, output_server_2_users
|
|
||||||
|
|
||||||
|
|
||||||
def generate_server_connections():
|
def configure_logger():
|
||||||
servers = []
|
# Remove default logger to configure our own
|
||||||
|
logger.remove()
|
||||||
|
|
||||||
plex_baseurl = os.getenv("PLEX_BASEURL", None)
|
# Choose log level based on environment
|
||||||
plex_token = os.getenv("PLEX_TOKEN", None)
|
# If in debug mode with a "debug" level, use DEBUG; otherwise, default to INFO.
|
||||||
plex_username = os.getenv("PLEX_USERNAME", None)
|
|
||||||
plex_password = os.getenv("PLEX_PASSWORD", None)
|
|
||||||
plex_servername = os.getenv("PLEX_SERVERNAME", None)
|
|
||||||
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))
|
|
||||||
|
|
||||||
if plex_baseurl and plex_token:
|
if level not in ["INFO", "DEBUG", "TRACE"]:
|
||||||
plex_baseurl = plex_baseurl.split(",")
|
logger.add(sys.stdout)
|
||||||
plex_token = plex_token.split(",")
|
raise Exception("Invalid DEBUG_LEVEL, please choose between INFO, DEBUG, TRACE")
|
||||||
|
|
||||||
if len(plex_baseurl) != len(plex_token):
|
# Add a sink for file logging and the console.
|
||||||
raise Exception(
|
logger.add(log_file, level=level, mode="w")
|
||||||
"PLEX_BASEURL and PLEX_TOKEN must have the same number of entries"
|
logger.add(sys.stdout, level=level)
|
||||||
)
|
|
||||||
|
|
||||||
for i, url in enumerate(plex_baseurl):
|
|
||||||
server = Plex(
|
|
||||||
baseurl=url.strip(),
|
|
||||||
token=plex_token[i].strip(),
|
|
||||||
username=None,
|
|
||||||
password=None,
|
|
||||||
servername=None,
|
|
||||||
ssl_bypass=ssl_bypass,
|
|
||||||
)
|
|
||||||
|
|
||||||
logger(f"Plex Server {i} info: {server.info()}", 3)
|
|
||||||
|
|
||||||
servers.append(
|
|
||||||
(
|
|
||||||
"plex",
|
|
||||||
server,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if plex_username and plex_password and plex_servername:
|
|
||||||
plex_username = plex_username.split(",")
|
|
||||||
plex_password = plex_password.split(",")
|
|
||||||
plex_servername = plex_servername.split(",")
|
|
||||||
|
|
||||||
if len(plex_username) != len(plex_password) or len(plex_username) != len(
|
|
||||||
plex_servername
|
|
||||||
):
|
|
||||||
raise Exception(
|
|
||||||
"PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries"
|
|
||||||
)
|
|
||||||
|
|
||||||
for i, username in enumerate(plex_username):
|
|
||||||
server = Plex(
|
|
||||||
baseurl=None,
|
|
||||||
token=None,
|
|
||||||
username=username.strip(),
|
|
||||||
password=plex_password[i].strip(),
|
|
||||||
servername=plex_servername[i].strip(),
|
|
||||||
ssl_bypass=ssl_bypass,
|
|
||||||
)
|
|
||||||
|
|
||||||
logger(f"Plex Server {i} info: {server.info()}", 3)
|
|
||||||
servers.append(
|
|
||||||
(
|
|
||||||
"plex",
|
|
||||||
server,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
|
|
||||||
jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)
|
|
||||||
|
|
||||||
if jellyfin_baseurl and jellyfin_token:
|
|
||||||
jellyfin_baseurl = jellyfin_baseurl.split(",")
|
|
||||||
jellyfin_token = jellyfin_token.split(",")
|
|
||||||
|
|
||||||
if len(jellyfin_baseurl) != len(jellyfin_token):
|
|
||||||
raise Exception(
|
|
||||||
"JELLYFIN_BASEURL and JELLYFIN_TOKEN must have the same number of entries"
|
|
||||||
)
|
|
||||||
|
|
||||||
for i, baseurl in enumerate(jellyfin_baseurl):
|
|
||||||
baseurl = baseurl.strip()
|
|
||||||
if baseurl[-1] == "/":
|
|
||||||
baseurl = baseurl[:-1]
|
|
||||||
|
|
||||||
server = Jellyfin(baseurl=baseurl, token=jellyfin_token[i].strip())
|
|
||||||
|
|
||||||
logger(f"Jellyfin Server {i} info: {server.info()}", 3)
|
|
||||||
servers.append(
|
|
||||||
(
|
|
||||||
"jellyfin",
|
|
||||||
server,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
return servers
|
|
||||||
|
|
||||||
|
|
||||||
def get_server_watched(
|
def should_sync_server(
|
||||||
server_connection: list,
|
server_1: Plex | Jellyfin | Emby,
|
||||||
users: dict,
|
server_2: Plex | Jellyfin | Emby,
|
||||||
blacklist_library: list,
|
) -> bool:
|
||||||
whitelist_library: list,
|
|
||||||
blacklist_library_type: list,
|
|
||||||
whitelist_library_type: list,
|
|
||||||
library_mapping: dict,
|
|
||||||
):
|
|
||||||
if server_connection[0] == "plex":
|
|
||||||
return server_connection[1].get_watched(
|
|
||||||
users,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
)
|
|
||||||
elif server_connection[0] == "jellyfin":
|
|
||||||
return server_connection[1].get_watched(
|
|
||||||
users,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def update_server_watched(
|
|
||||||
server_connection: list,
|
|
||||||
server_watched_filtered: dict,
|
|
||||||
user_mapping: dict,
|
|
||||||
library_mapping: dict,
|
|
||||||
dryrun: bool,
|
|
||||||
):
|
|
||||||
if server_connection[0] == "plex":
|
|
||||||
server_connection[1].update_watched(
|
|
||||||
server_watched_filtered, user_mapping, library_mapping, dryrun
|
|
||||||
)
|
|
||||||
elif server_connection[0] == "jellyfin":
|
|
||||||
server_connection[1].update_watched(
|
|
||||||
server_watched_filtered, user_mapping, library_mapping, dryrun
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def should_sync_server(server_1_type, server_2_type):
|
|
||||||
sync_from_plex_to_jellyfin = str_to_bool(
|
sync_from_plex_to_jellyfin = str_to_bool(
|
||||||
os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True")
|
os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True")
|
||||||
)
|
)
|
||||||
|
sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
|
||||||
|
sync_from_plex_to_emby = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_EMBY", "True"))
|
||||||
|
|
||||||
sync_from_jelly_to_plex = str_to_bool(
|
sync_from_jelly_to_plex = str_to_bool(
|
||||||
os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True")
|
os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True")
|
||||||
)
|
)
|
||||||
sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
|
|
||||||
sync_from_jelly_to_jellyfin = str_to_bool(
|
sync_from_jelly_to_jellyfin = str_to_bool(
|
||||||
os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
|
os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
|
||||||
)
|
)
|
||||||
|
sync_from_jelly_to_emby = str_to_bool(
|
||||||
|
os.getenv("SYNC_FROM_JELLYFIN_TO_EMBY", "True")
|
||||||
|
)
|
||||||
|
|
||||||
if (
|
sync_from_emby_to_plex = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_PLEX", "True"))
|
||||||
server_1_type == "plex"
|
sync_from_emby_to_jellyfin = str_to_bool(
|
||||||
and server_2_type == "plex"
|
os.getenv("SYNC_FROM_EMBY_TO_JELLYFIN", "True")
|
||||||
and not sync_from_plex_to_plex
|
)
|
||||||
):
|
sync_from_emby_to_emby = str_to_bool(os.getenv("SYNC_FROM_EMBY_TO_EMBY", "True"))
|
||||||
logger("Sync between plex and plex is disabled", 1)
|
|
||||||
return False
|
|
||||||
|
|
||||||
if (
|
if isinstance(server_1, Plex):
|
||||||
server_1_type == "plex"
|
if isinstance(server_2, Jellyfin) and not sync_from_plex_to_jellyfin:
|
||||||
and server_2_type == "jellyfin"
|
logger.info("Sync from plex -> jellyfin is disabled")
|
||||||
and not sync_from_jelly_to_plex
|
return False
|
||||||
):
|
|
||||||
logger("Sync from jellyfin to plex disabled", 1)
|
|
||||||
return False
|
|
||||||
|
|
||||||
if (
|
if isinstance(server_2, Emby) and not sync_from_plex_to_emby:
|
||||||
server_1_type == "jellyfin"
|
logger.info("Sync from plex -> emby is disabled")
|
||||||
and server_2_type == "jellyfin"
|
return False
|
||||||
and not sync_from_jelly_to_jellyfin
|
|
||||||
):
|
|
||||||
logger("Sync between jellyfin and jellyfin is disabled", 1)
|
|
||||||
return False
|
|
||||||
|
|
||||||
if (
|
if isinstance(server_2, Plex) and not sync_from_plex_to_plex:
|
||||||
server_1_type == "jellyfin"
|
logger.info("Sync from plex -> plex is disabled")
|
||||||
and server_2_type == "plex"
|
return False
|
||||||
and not sync_from_plex_to_jellyfin
|
|
||||||
):
|
if isinstance(server_1, Jellyfin):
|
||||||
logger("Sync from plex to jellyfin is disabled", 1)
|
if isinstance(server_2, Plex) and not sync_from_jelly_to_plex:
|
||||||
return False
|
logger.info("Sync from jellyfin -> plex is disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if isinstance(server_2, Jellyfin) and not sync_from_jelly_to_jellyfin:
|
||||||
|
logger.info("Sync from jellyfin -> jellyfin is disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if isinstance(server_2, Emby) and not sync_from_jelly_to_emby:
|
||||||
|
logger.info("Sync from jellyfin -> emby is disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if isinstance(server_1, Emby):
|
||||||
|
if isinstance(server_2, Plex) and not sync_from_emby_to_plex:
|
||||||
|
logger.info("Sync from emby -> plex is disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if isinstance(server_2, Jellyfin) and not sync_from_emby_to_jellyfin:
|
||||||
|
logger.info("Sync from emby -> jellyfin is disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if isinstance(server_2, Emby) and not sync_from_emby_to_emby:
|
||||||
|
logger.info("Sync from emby -> emby is disabled")
|
||||||
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def main_loop():
|
def main_loop():
|
||||||
logfile = os.getenv("LOGFILE", "log.log")
|
|
||||||
# Delete logfile if it exists
|
|
||||||
if os.path.exists(logfile):
|
|
||||||
os.remove(logfile)
|
|
||||||
|
|
||||||
dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
|
dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
|
||||||
logger(f"Dryrun: {dryrun}", 1)
|
logger.info(f"Dryrun: {dryrun}")
|
||||||
|
|
||||||
user_mapping = os.getenv("USER_MAPPING")
|
user_mapping = os.getenv("USER_MAPPING", None)
|
||||||
if user_mapping:
|
if user_mapping:
|
||||||
user_mapping = json.loads(user_mapping.lower())
|
user_mapping = json.loads(user_mapping.lower())
|
||||||
logger(f"User Mapping: {user_mapping}", 1)
|
logger.info(f"User Mapping: {user_mapping}")
|
||||||
|
|
||||||
library_mapping = os.getenv("LIBRARY_MAPPING")
|
library_mapping = os.getenv("LIBRARY_MAPPING", None)
|
||||||
if library_mapping:
|
if library_mapping:
|
||||||
library_mapping = json.loads(library_mapping)
|
library_mapping = json.loads(library_mapping)
|
||||||
logger(f"Library Mapping: {library_mapping}", 1)
|
logger.info(f"Library Mapping: {library_mapping}")
|
||||||
|
|
||||||
# Create (black/white)lists
|
# Create (black/white)lists
|
||||||
logger("Creating (black/white)lists", 1)
|
logger.info("Creating (black/white)lists")
|
||||||
blacklist_library = os.getenv("BLACKLIST_LIBRARY", None)
|
blacklist_library = parse_string_to_list(os.getenv("BLACKLIST_LIBRARY", None))
|
||||||
whitelist_library = os.getenv("WHITELIST_LIBRARY", None)
|
whitelist_library = parse_string_to_list(os.getenv("WHITELIST_LIBRARY", None))
|
||||||
blacklist_library_type = os.getenv("BLACKLIST_LIBRARY_TYPE", None)
|
blacklist_library_type = parse_string_to_list(
|
||||||
whitelist_library_type = os.getenv("WHITELIST_LIBRARY_TYPE", None)
|
os.getenv("BLACKLIST_LIBRARY_TYPE", None)
|
||||||
blacklist_users = os.getenv("BLACKLIST_USERS", None)
|
)
|
||||||
whitelist_users = os.getenv("WHITELIST_USERS", None)
|
whitelist_library_type = parse_string_to_list(
|
||||||
|
os.getenv("WHITELIST_LIBRARY_TYPE", None)
|
||||||
|
)
|
||||||
|
blacklist_users = parse_string_to_list(os.getenv("BLACKLIST_USERS", None))
|
||||||
|
whitelist_users = parse_string_to_list(os.getenv("WHITELIST_USERS", None))
|
||||||
|
|
||||||
(
|
(
|
||||||
blacklist_library,
|
blacklist_library,
|
||||||
@@ -303,7 +157,7 @@ def main_loop():
|
|||||||
)
|
)
|
||||||
|
|
||||||
# Create server connections
|
# Create server connections
|
||||||
logger("Creating server connections", 1)
|
logger.info("Creating server connections")
|
||||||
servers = generate_server_connections()
|
servers = generate_server_connections()
|
||||||
|
|
||||||
for server_1 in servers:
|
for server_1 in servers:
|
||||||
@@ -313,70 +167,72 @@ def main_loop():
|
|||||||
|
|
||||||
# Start server_2 at the next server in the list
|
# Start server_2 at the next server in the list
|
||||||
for server_2 in servers[servers.index(server_1) + 1 :]:
|
for server_2 in servers[servers.index(server_1) + 1 :]:
|
||||||
logger(f"Server 1: {server_1[0].capitalize()}: {server_1[1].info()}", 0)
|
# Check if server 1 and server 2 are going to be synced in either direction, skip if not
|
||||||
logger(f"Server 2: {server_2[0].capitalize()}: {server_2[1].info()}", 0)
|
if not should_sync_server(server_1, server_2) and not should_sync_server(
|
||||||
|
server_2, server_1
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
logger.info(f"Server 1: {type(server_1)}: {server_1.info()}")
|
||||||
|
logger.info(f"Server 2: {type(server_2)}: {server_2.info()}")
|
||||||
|
|
||||||
# Create users list
|
# Create users list
|
||||||
logger("Creating users list", 1)
|
logger.info("Creating users list")
|
||||||
server_1_users, server_2_users = setup_users(
|
server_1_users, server_2_users = setup_users(
|
||||||
server_1, server_2, blacklist_users, whitelist_users, user_mapping
|
server_1, server_2, blacklist_users, whitelist_users, user_mapping
|
||||||
)
|
)
|
||||||
|
|
||||||
logger("Creating watched lists", 1)
|
server_1_libraries, server_2_libraries = setup_libraries(
|
||||||
server_1_watched = get_server_watched(
|
|
||||||
server_1,
|
server_1,
|
||||||
server_1_users,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
)
|
|
||||||
logger("Finished creating watched list server 1", 1)
|
|
||||||
server_2_watched = get_server_watched(
|
|
||||||
server_2,
|
server_2,
|
||||||
server_2_users,
|
|
||||||
blacklist_library,
|
blacklist_library,
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
blacklist_library_type,
|
||||||
|
whitelist_library,
|
||||||
whitelist_library_type,
|
whitelist_library_type,
|
||||||
library_mapping,
|
library_mapping,
|
||||||
)
|
)
|
||||||
logger("Finished creating watched list server 2", 1)
|
logger.info(f"Server 1 syncing libraries: {server_1_libraries}")
|
||||||
logger(f"Server 1 watched: {server_1_watched}", 3)
|
logger.info(f"Server 2 syncing libraries: {server_2_libraries}")
|
||||||
logger(f"Server 2 watched: {server_2_watched}", 3)
|
|
||||||
|
|
||||||
logger("Cleaning Server 1 Watched", 1)
|
logger.info("Creating watched lists", 1)
|
||||||
|
server_1_watched = server_1.get_watched(server_1_users, server_1_libraries)
|
||||||
|
logger.info("Finished creating watched list server 1")
|
||||||
|
|
||||||
|
server_2_watched = server_2.get_watched(server_2_users, server_2_libraries)
|
||||||
|
logger.info("Finished creating watched list server 2")
|
||||||
|
|
||||||
|
logger.debug(f"Server 1 watched: {server_1_watched}")
|
||||||
|
logger.debug(f"Server 2 watched: {server_2_watched}")
|
||||||
|
|
||||||
|
logger.info("Cleaning Server 1 Watched", 1)
|
||||||
server_1_watched_filtered = cleanup_watched(
|
server_1_watched_filtered = cleanup_watched(
|
||||||
server_1_watched, server_2_watched, user_mapping, library_mapping
|
server_1_watched, server_2_watched, user_mapping, library_mapping
|
||||||
)
|
)
|
||||||
|
|
||||||
logger("Cleaning Server 2 Watched", 1)
|
logger.info("Cleaning Server 2 Watched", 1)
|
||||||
server_2_watched_filtered = cleanup_watched(
|
server_2_watched_filtered = cleanup_watched(
|
||||||
server_2_watched, server_1_watched, user_mapping, library_mapping
|
server_2_watched, server_1_watched, user_mapping, library_mapping
|
||||||
)
|
)
|
||||||
|
|
||||||
logger(
|
logger.debug(
|
||||||
f"server 1 watched that needs to be synced to server 2:\n{server_1_watched_filtered}",
|
f"server 1 watched that needs to be synced to server 2:\n{server_1_watched_filtered}",
|
||||||
1,
|
|
||||||
)
|
)
|
||||||
logger(
|
logger.debug(
|
||||||
f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
|
f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
|
||||||
1,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if should_sync_server(server_1[0], server_2[0]):
|
if should_sync_server(server_2, server_1):
|
||||||
update_server_watched(
|
logger.info(f"Syncing {server_2.info()} -> {server_1.info()}")
|
||||||
server_1,
|
server_1.update_watched(
|
||||||
server_2_watched_filtered,
|
server_2_watched_filtered,
|
||||||
user_mapping,
|
user_mapping,
|
||||||
library_mapping,
|
library_mapping,
|
||||||
dryrun,
|
dryrun,
|
||||||
)
|
)
|
||||||
|
|
||||||
if should_sync_server(server_2[0], server_1[0]):
|
if should_sync_server(server_1, server_2):
|
||||||
update_server_watched(
|
logger.info(f"Syncing {server_1.info()} -> {server_2.info()}")
|
||||||
server_2,
|
server_2.update_watched(
|
||||||
server_1_watched_filtered,
|
server_1_watched_filtered,
|
||||||
user_mapping,
|
user_mapping,
|
||||||
library_mapping,
|
library_mapping,
|
||||||
@@ -384,43 +240,46 @@ def main_loop():
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@logger.catch
|
||||||
def main():
|
def main():
|
||||||
run_only_once = str_to_bool(os.getenv("RUN_ONLY_ONCE", "False"))
|
run_only_once = str_to_bool(os.getenv("RUN_ONLY_ONCE", "False"))
|
||||||
sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
|
sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
|
||||||
times = []
|
times: list[float] = []
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
start = perf_counter()
|
start = perf_counter()
|
||||||
|
# Reconfigure the logger on each loop so the logs are rotated on each run
|
||||||
|
configure_logger()
|
||||||
main_loop()
|
main_loop()
|
||||||
end = perf_counter()
|
end = perf_counter()
|
||||||
times.append(end - start)
|
times.append(end - start)
|
||||||
|
|
||||||
if len(times) > 0:
|
if len(times) > 0:
|
||||||
logger(f"Average time: {sum(times) / len(times)}", 0)
|
logger.info(f"Average time: {sum(times) / len(times)}")
|
||||||
|
|
||||||
if run_only_once:
|
if run_only_once:
|
||||||
break
|
break
|
||||||
|
|
||||||
logger(f"Looping in {sleep_duration}")
|
logger.info(f"Looping in {sleep_duration}")
|
||||||
sleep(sleep_duration)
|
sleep(sleep_duration)
|
||||||
|
|
||||||
except Exception as error:
|
except Exception as error:
|
||||||
if isinstance(error, list):
|
if isinstance(error, list):
|
||||||
for message in error:
|
for message in error:
|
||||||
logger(message, log_type=2)
|
logger.error(message)
|
||||||
else:
|
else:
|
||||||
logger(error, log_type=2)
|
logger.error(error)
|
||||||
|
|
||||||
logger(traceback.format_exc(), 2)
|
logger.error(traceback.format_exc())
|
||||||
|
|
||||||
if run_only_once:
|
if run_only_once:
|
||||||
break
|
break
|
||||||
|
|
||||||
logger(f"Retrying in {sleep_duration}", log_type=0)
|
logger.info(f"Retrying in {sleep_duration}")
|
||||||
sleep(sleep_duration)
|
sleep(sleep_duration)
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
if len(times) > 0:
|
if len(times) > 0:
|
||||||
logger(f"Average time: {sum(times) / len(times)}", 0)
|
logger.info(f"Average time: {sum(times) / len(times)}")
|
||||||
logger("Exiting", log_type=0)
|
logger.info("Exiting")
|
||||||
os._exit(0)
|
os._exit(0)
|
||||||
|
|||||||
714
src/plex.py
714
src/plex.py
@@ -1,6 +1,7 @@
|
|||||||
import os, requests, traceback
|
import os
|
||||||
|
import requests
|
||||||
from dotenv import load_dotenv
|
from dotenv import load_dotenv
|
||||||
from typing import Dict, Union, FrozenSet
|
from loguru import logger
|
||||||
|
|
||||||
from urllib3.poolmanager import PoolManager
|
from urllib3.poolmanager import PoolManager
|
||||||
from math import floor
|
from math import floor
|
||||||
@@ -12,19 +13,20 @@ from plexapi.server import PlexServer
|
|||||||
from plexapi.myplex import MyPlexAccount
|
from plexapi.myplex import MyPlexAccount
|
||||||
|
|
||||||
from src.functions import (
|
from src.functions import (
|
||||||
logger,
|
|
||||||
search_mapping,
|
search_mapping,
|
||||||
future_thread_executor,
|
|
||||||
contains_nested,
|
|
||||||
log_marked,
|
log_marked,
|
||||||
str_to_bool,
|
str_to_bool,
|
||||||
)
|
)
|
||||||
from src.library import (
|
from src.watched import (
|
||||||
check_skip_logic,
|
LibraryData,
|
||||||
generate_library_guids_dict,
|
MediaIdentifiers,
|
||||||
|
MediaItem,
|
||||||
|
WatchedStatus,
|
||||||
|
Series,
|
||||||
|
UserData,
|
||||||
|
check_same_identifiers,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
load_dotenv(override=True)
|
load_dotenv(override=True)
|
||||||
|
|
||||||
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
||||||
@@ -43,368 +45,171 @@ class HostNameIgnoringAdapter(RequestsHTTPAdapter):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def extract_guids_from_item(item: Union[Movie, Show, Episode]) -> Dict[str, str]:
|
def extract_guids_from_item(item: Movie | Show | Episode) -> dict[str, str]:
|
||||||
# If GENERATE_GUIDS is set to False, then return an empty dict
|
# If GENERATE_GUIDS is set to False, then return an empty dict
|
||||||
if not generate_guids:
|
if not generate_guids:
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
guids: Dict[str, str] = dict(
|
guids: dict[str, str] = dict(
|
||||||
guid.id.split("://")
|
guid.id.split("://")
|
||||||
for guid in item.guids
|
for guid in item.guids
|
||||||
if guid.id is not None and len(guid.id.strip()) > 0
|
if guid.id is not None and len(guid.id.strip()) > 0
|
||||||
)
|
)
|
||||||
|
|
||||||
if len(guids) == 0:
|
|
||||||
logger(
|
|
||||||
f"Plex: Failed to get any guids for {item.title}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
return guids
|
return guids
|
||||||
|
|
||||||
|
|
||||||
def get_guids(item: Union[Movie, Episode], completed=True):
|
def extract_identifiers_from_item(item: Movie | Show | Episode) -> MediaIdentifiers:
|
||||||
if not item.locations:
|
guids = extract_guids_from_item(item)
|
||||||
logger(
|
|
||||||
f"Plex: {item.title} has no locations",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not item.guids:
|
return MediaIdentifiers(
|
||||||
logger(
|
title=item.title,
|
||||||
f"Plex: {item.title} has no guids",
|
locations=(
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"title": item.title,
|
|
||||||
"locations": (
|
|
||||||
tuple([location.split("/")[-1] for location in item.locations])
|
tuple([location.split("/")[-1] for location in item.locations])
|
||||||
if generate_locations
|
if generate_locations
|
||||||
else tuple()
|
else tuple()
|
||||||
),
|
),
|
||||||
"status": {
|
imdb_id=guids.get("imdb", None),
|
||||||
"completed": completed,
|
tvdb_id=guids.get("tvdb", None),
|
||||||
"time": item.viewOffset,
|
tmdb_id=guids.get("tmdb", None),
|
||||||
},
|
)
|
||||||
} | extract_guids_from_item(
|
|
||||||
item
|
|
||||||
) # Merge the metadata and guid dictionaries
|
|
||||||
|
|
||||||
|
|
||||||
def get_user_library_watched_show(show, process_episodes, threads=None):
|
def get_mediaitem(item: Movie | Episode, completed=True) -> MediaItem:
|
||||||
|
return MediaItem(
|
||||||
|
identifiers=extract_identifiers_from_item(item),
|
||||||
|
status=WatchedStatus(completed=completed, time=item.viewOffset),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def update_user_watched(
|
||||||
|
user: MyPlexAccount,
|
||||||
|
user_plex: PlexServer,
|
||||||
|
library_data: LibraryData,
|
||||||
|
library_name: str,
|
||||||
|
dryrun: bool,
|
||||||
|
):
|
||||||
try:
|
try:
|
||||||
show_guids: FrozenSet = frozenset(
|
# If there are no movies or shows to update, exit early.
|
||||||
(
|
if not library_data.series and not library_data.movies:
|
||||||
{
|
return
|
||||||
"title": show.title,
|
|
||||||
"locations": (
|
logger.info(
|
||||||
tuple([location.split("/")[-1] for location in show.locations])
|
f"Plex: Updating watched for {user.title} in library {library_name}"
|
||||||
if generate_locations
|
|
||||||
else tuple()
|
|
||||||
),
|
|
||||||
}
|
|
||||||
| extract_guids_from_item(show)
|
|
||||||
).items() # Merge the metadata and guid dictionaries
|
|
||||||
)
|
)
|
||||||
|
library_section = user_plex.library.section(library_name)
|
||||||
|
|
||||||
episode_guids_args = []
|
# Update movies.
|
||||||
|
if library_data.movies:
|
||||||
for episode in process_episodes:
|
# Search for Plex movies that are currently marked as unwatched.
|
||||||
episode_guids_args.append([get_guids, episode, episode.isWatched])
|
for plex_movie in library_section.search(unwatched=True):
|
||||||
|
plex_identifiers = extract_identifiers_from_item(plex_movie)
|
||||||
episode_guids_results = future_thread_executor(
|
# Check each stored movie for a match.
|
||||||
episode_guids_args, threads=threads
|
for stored_movie in library_data.movies:
|
||||||
)
|
if check_same_identifiers(
|
||||||
|
plex_identifiers, stored_movie.identifiers
|
||||||
episode_guids = {}
|
):
|
||||||
for index, episode in enumerate(process_episodes):
|
# If the stored movie is marked as watched (or has enough progress),
|
||||||
if episode.parentIndex not in episode_guids:
|
# update the Plex movie accordingly.
|
||||||
episode_guids[episode.parentIndex] = []
|
if stored_movie.status.completed:
|
||||||
episode_guids[episode.parentIndex].append(episode_guids_results[index])
|
msg = f"Plex: {plex_movie.title} as watched for {user.title} in {library_name}"
|
||||||
|
if not dryrun:
|
||||||
return show_guids, episode_guids
|
plex_movie.markWatched()
|
||||||
except Exception:
|
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
|
||||||
return {}, {}
|
log_marked(
|
||||||
|
"Plex",
|
||||||
|
user_plex.friendlyName,
|
||||||
def get_user_library_watched(user, user_plex, library):
|
user.title,
|
||||||
user_name: str = user.username.lower() if user.username else user.title.lower()
|
library_name,
|
||||||
try:
|
plex_movie.title,
|
||||||
logger(
|
None,
|
||||||
f"Plex: Generating watched for {user_name} in library {library.title}",
|
None,
|
||||||
0,
|
|
||||||
)
|
|
||||||
|
|
||||||
library_videos = user_plex.library.section(library.title)
|
|
||||||
|
|
||||||
if library.type == "movie":
|
|
||||||
watched = []
|
|
||||||
|
|
||||||
args = [
|
|
||||||
[get_guids, video, video.isWatched]
|
|
||||||
for video in library_videos.search(unwatched=False)
|
|
||||||
+ library_videos.search(inProgress=True)
|
|
||||||
if video.isWatched or video.viewOffset >= 60000
|
|
||||||
]
|
|
||||||
|
|
||||||
for guid in future_thread_executor(args, threads=len(args)):
|
|
||||||
logger(f"Plex: Adding {guid['title']} to {user_name} watched list", 3)
|
|
||||||
watched.append(guid)
|
|
||||||
elif library.type == "show":
|
|
||||||
watched = {}
|
|
||||||
|
|
||||||
# Get all watched shows and partially watched shows
|
|
||||||
parallel_show_task = []
|
|
||||||
parallel_episodes_task = []
|
|
||||||
|
|
||||||
for show in library_videos.search(unwatched=False) + library_videos.search(
|
|
||||||
inProgress=True
|
|
||||||
):
|
|
||||||
process_episodes = []
|
|
||||||
for episode in show.episodes():
|
|
||||||
if episode.isWatched or episode.viewOffset >= 60000:
|
|
||||||
process_episodes.append(episode)
|
|
||||||
|
|
||||||
# Shows with more than 24 episodes has its episodes processed in parallel
|
|
||||||
# Shows with less than 24 episodes has its episodes processed in serial but the shows are processed in parallel
|
|
||||||
if len(process_episodes) >= 24:
|
|
||||||
parallel_episodes_task.append(
|
|
||||||
[
|
|
||||||
get_user_library_watched_show,
|
|
||||||
show,
|
|
||||||
process_episodes,
|
|
||||||
len(process_episodes),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
parallel_show_task.append(
|
|
||||||
[get_user_library_watched_show, show, process_episodes, 1]
|
|
||||||
)
|
|
||||||
|
|
||||||
for show_guids, episode_guids in future_thread_executor(
|
|
||||||
parallel_show_task, threads=len(parallel_show_task)
|
|
||||||
) + future_thread_executor(parallel_episodes_task, threads=1):
|
|
||||||
if show_guids and episode_guids:
|
|
||||||
watched[show_guids] = episode_guids
|
|
||||||
logger(
|
|
||||||
f"Plex: Added {episode_guids} to {user_name} {show_guids} watched list",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
watched = None
|
|
||||||
|
|
||||||
logger(f"Plex: Got watched for {user_name} in library {library.title}", 1)
|
|
||||||
logger(f"Plex: {watched}", 3)
|
|
||||||
|
|
||||||
return {user_name: {library.title: watched} if watched is not None else {}}
|
|
||||||
except Exception as e:
|
|
||||||
logger(
|
|
||||||
f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
|
|
||||||
2,
|
|
||||||
)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
|
|
||||||
def find_video(plex_search, video_ids, videos=None):
|
|
||||||
try:
|
|
||||||
if not generate_guids and not generate_locations:
|
|
||||||
return False, []
|
|
||||||
|
|
||||||
if generate_locations:
|
|
||||||
for location in plex_search.locations:
|
|
||||||
if (
|
|
||||||
contains_nested(location.split("/")[-1], video_ids["locations"])
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
episode_videos = []
|
|
||||||
if videos:
|
|
||||||
for show, seasons in videos.items():
|
|
||||||
show = {k: v for k, v in show}
|
|
||||||
if (
|
|
||||||
contains_nested(
|
|
||||||
location.split("/")[-1], show["locations"]
|
|
||||||
)
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
for season in seasons.values():
|
|
||||||
for episode in season:
|
|
||||||
episode_videos.append(episode)
|
|
||||||
|
|
||||||
return True, episode_videos
|
|
||||||
|
|
||||||
if generate_guids:
|
|
||||||
for guid in plex_search.guids:
|
|
||||||
guid_source, guid_id = guid.id.split("://")
|
|
||||||
|
|
||||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
|
||||||
if guid_source in video_ids.keys():
|
|
||||||
if guid_id in video_ids[guid_source]:
|
|
||||||
episode_videos = []
|
|
||||||
if videos:
|
|
||||||
for show, seasons in videos.items():
|
|
||||||
show = {k: v for k, v in show}
|
|
||||||
if guid_source in show.keys():
|
|
||||||
if guid_id == show[guid_source]:
|
|
||||||
for season in seasons.values():
|
|
||||||
for episode in season:
|
|
||||||
episode_videos.append(episode)
|
|
||||||
|
|
||||||
return True, episode_videos
|
|
||||||
|
|
||||||
return False, []
|
|
||||||
except Exception:
|
|
||||||
return False, []
|
|
||||||
|
|
||||||
|
|
||||||
def get_video_status(plex_search, video_ids, videos):
|
|
||||||
try:
|
|
||||||
if not generate_guids and not generate_locations:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if generate_locations:
|
|
||||||
for location in plex_search.locations:
|
|
||||||
if (
|
|
||||||
contains_nested(location.split("/")[-1], video_ids["locations"])
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
for video in videos:
|
|
||||||
if (
|
|
||||||
contains_nested(location.split("/")[-1], video["locations"])
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
return video["status"]
|
|
||||||
|
|
||||||
if generate_guids:
|
|
||||||
for guid in plex_search.guids:
|
|
||||||
guid_source, guid_id = guid.id.split("://")
|
|
||||||
|
|
||||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
|
||||||
if guid_source in video_ids.keys():
|
|
||||||
if guid_id in video_ids[guid_source]:
|
|
||||||
for video in videos:
|
|
||||||
if guid_source in video.keys():
|
|
||||||
if guid_id == video[guid_source]:
|
|
||||||
return video["status"]
|
|
||||||
|
|
||||||
return None
|
|
||||||
except Exception:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def update_user_watched(user, user_plex, library, videos, dryrun):
|
|
||||||
try:
|
|
||||||
logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
|
|
||||||
(
|
|
||||||
videos_shows_ids,
|
|
||||||
videos_episodes_ids,
|
|
||||||
videos_movies_ids,
|
|
||||||
) = generate_library_guids_dict(videos)
|
|
||||||
logger(
|
|
||||||
f"Plex: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
library_videos = user_plex.library.section(library)
|
|
||||||
if videos_movies_ids:
|
|
||||||
for movies_search in library_videos.search(unwatched=True):
|
|
||||||
video_status = get_video_status(
|
|
||||||
movies_search, videos_movies_ids, videos
|
|
||||||
)
|
|
||||||
if video_status:
|
|
||||||
if video_status["completed"]:
|
|
||||||
msg = f"Plex: {movies_search.title} as watched for {user.title} in {library}"
|
|
||||||
if not dryrun:
|
|
||||||
logger(msg, 5)
|
|
||||||
movies_search.markWatched()
|
|
||||||
else:
|
|
||||||
logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(user.title, library, movies_search.title, None, None)
|
|
||||||
elif video_status["time"] > 60_000:
|
|
||||||
msg = f"Plex: {movies_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library}"
|
|
||||||
if not dryrun:
|
|
||||||
logger(msg, 5)
|
|
||||||
movies_search.updateTimeline(video_status["time"])
|
|
||||||
else:
|
|
||||||
logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user.title,
|
|
||||||
library,
|
|
||||||
movies_search.title,
|
|
||||||
duration=video_status["time"],
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}",
|
|
||||||
1,
|
|
||||||
)
|
|
||||||
|
|
||||||
if videos_shows_ids and videos_episodes_ids:
|
|
||||||
for show_search in library_videos.search(unwatched=True):
|
|
||||||
show_found, episode_videos = find_video(
|
|
||||||
show_search, videos_shows_ids, videos
|
|
||||||
)
|
|
||||||
if show_found:
|
|
||||||
for episode_search in show_search.episodes():
|
|
||||||
video_status = get_video_status(
|
|
||||||
episode_search, videos_episodes_ids, episode_videos
|
|
||||||
)
|
|
||||||
if video_status:
|
|
||||||
if video_status["completed"]:
|
|
||||||
msg = f"Plex: {show_search.title} {episode_search.title} as watched for {user.title} in {library}"
|
|
||||||
if not dryrun:
|
|
||||||
logger(msg, 5)
|
|
||||||
episode_search.markWatched()
|
|
||||||
else:
|
|
||||||
logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user.title,
|
|
||||||
library,
|
|
||||||
show_search.title,
|
|
||||||
episode_search.title,
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
msg = f"Plex: {show_search.title} {episode_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library}"
|
|
||||||
if not dryrun:
|
|
||||||
logger(msg, 5)
|
|
||||||
episode_search.updateTimeline(video_status["time"])
|
|
||||||
else:
|
|
||||||
logger(msg, 6)
|
|
||||||
|
|
||||||
log_marked(
|
|
||||||
user.title,
|
|
||||||
library,
|
|
||||||
show_search.title,
|
|
||||||
episode_search.title,
|
|
||||||
video_status["time"],
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
logger(
|
|
||||||
f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}",
|
|
||||||
3,
|
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger(
|
msg = f"Plex: {plex_movie.title} as partially watched for {floor(stored_movie.status.time / 60_000)} minutes for {user.title} in {library_name}"
|
||||||
f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}",
|
if not dryrun:
|
||||||
3,
|
plex_movie.updateTimeline(stored_movie.status.time)
|
||||||
)
|
|
||||||
|
|
||||||
if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
|
logger.success(f"{'[DRYRUN] ' if dryrun else ''}{msg}")
|
||||||
logger(
|
log_marked(
|
||||||
f"Jellyfin: No videos to mark as watched for {user.title} in library {library}",
|
"Plex",
|
||||||
1,
|
user_plex.friendlyName,
|
||||||
)
|
user.title,
|
||||||
|
library_name,
|
||||||
|
plex_movie.title,
|
||||||
|
duration=stored_movie.status.time,
|
||||||
|
)
|
||||||
|
# Once matched, no need to check further.
|
||||||
|
break
|
||||||
|
|
||||||
|
# Update TV Shows (series/episodes).
|
||||||
|
if library_data.series:
|
||||||
|
# For each Plex show in the library section:
|
||||||
|
plex_shows = library_section.search(unwatched=True)
|
||||||
|
for plex_show in plex_shows:
|
||||||
|
# Extract identifiers from the Plex show.
|
||||||
|
plex_show_identifiers = extract_identifiers_from_item(plex_show)
|
||||||
|
# Try to find a matching series in your stored library.
|
||||||
|
for stored_series in library_data.series:
|
||||||
|
if check_same_identifiers(
|
||||||
|
plex_show_identifiers, stored_series.identifiers
|
||||||
|
):
|
||||||
|
logger.info(f"Found matching show for '{plex_show.title}'")
|
||||||
|
# Now update episodes.
|
||||||
|
# Get the list of Plex episodes for this show.
|
||||||
|
plex_episodes = plex_show.episodes()
|
||||||
|
for plex_episode in plex_episodes:
|
||||||
|
plex_episode_identifiers = extract_identifiers_from_item(
|
||||||
|
plex_episode
|
||||||
|
)
|
||||||
|
for stored_ep in stored_series.episodes:
|
||||||
|
if check_same_identifiers(
|
||||||
|
plex_episode_identifiers, stored_ep.identifiers
|
||||||
|
):
|
||||||
|
if stored_ep.status.completed:
|
||||||
|
msg = f"Plex: {plex_show.title} {plex_episode.title} as watched for {user.title} in {library_name}"
|
||||||
|
if not dryrun:
|
||||||
|
plex_episode.markWatched()
|
||||||
|
|
||||||
|
logger.success(
|
||||||
|
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
|
||||||
|
)
|
||||||
|
log_marked(
|
||||||
|
"Plex",
|
||||||
|
user_plex.friendlyName,
|
||||||
|
user.title,
|
||||||
|
library_name,
|
||||||
|
plex_show.title,
|
||||||
|
plex_episode.title,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
msg = f"Plex: {plex_show.title} {plex_episode.title} as partially watched for {floor(stored_ep.status.time / 60_000)} minutes for {user.title} in {library_name}"
|
||||||
|
if not dryrun:
|
||||||
|
plex_episode.updateTimeline(
|
||||||
|
stored_ep.status.time
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.success(
|
||||||
|
f"{'[DRYRUN] ' if dryrun else ''}{msg}"
|
||||||
|
)
|
||||||
|
log_marked(
|
||||||
|
"Plex",
|
||||||
|
user_plex.friendlyName,
|
||||||
|
user.title,
|
||||||
|
library_name,
|
||||||
|
plex_show.title,
|
||||||
|
plex_episode.title,
|
||||||
|
stored_ep.status.time,
|
||||||
|
)
|
||||||
|
break # Found a matching episode.
|
||||||
|
break # Found a matching show.
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger(
|
logger.error(
|
||||||
f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}",
|
f"Plex: Failed to update watched for {user.title} in library {library_name}, Error: {e}",
|
||||||
2,
|
2,
|
||||||
)
|
)
|
||||||
logger(traceback.format_exc(), 2)
|
raise e
|
||||||
|
|
||||||
|
|
||||||
# class plex accept base url and token and username and password but default with none
|
# class plex accept base url and token and username and password but default with none
|
||||||
@@ -419,6 +224,7 @@ class Plex:
|
|||||||
ssl_bypass=False,
|
ssl_bypass=False,
|
||||||
session=None,
|
session=None,
|
||||||
):
|
):
|
||||||
|
self.server_type = "Plex"
|
||||||
self.baseurl = baseurl
|
self.baseurl = baseurl
|
||||||
self.token = token
|
self.token = token
|
||||||
self.username = username
|
self.username = username
|
||||||
@@ -448,15 +254,15 @@ class Plex:
|
|||||||
|
|
||||||
return plex
|
return plex
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
if self.username or self.password:
|
if self.username:
|
||||||
msg = f"Failed to login via plex account {self.username}"
|
msg = f"Failed to login via plex account {self.username}"
|
||||||
logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
|
logger.error(f"Plex: Failed to login, {msg}, Error: {e}")
|
||||||
else:
|
else:
|
||||||
logger(f"Plex: Failed to login, Error: {e}", 2)
|
logger.error(f"Plex: Failed to login, Error: {e}")
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|
||||||
def info(self) -> str:
|
def info(self) -> str:
|
||||||
return f"{self.plex.friendlyName}: {self.plex.version}"
|
return f"Plex {self.plex.friendlyName}: {self.plex.version}"
|
||||||
|
|
||||||
def get_users(self):
|
def get_users(self):
|
||||||
try:
|
try:
|
||||||
@@ -467,21 +273,104 @@ class Plex:
|
|||||||
|
|
||||||
return users
|
return users
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger(f"Plex: Failed to get users, Error: {e}", 2)
|
logger.error(f"Plex: Failed to get users, Error: {e}")
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|
||||||
def get_watched(
|
def get_libraries(self) -> dict[str, str]:
|
||||||
self,
|
|
||||||
users,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
):
|
|
||||||
try:
|
try:
|
||||||
# Get all libraries
|
output = {}
|
||||||
users_watched = {}
|
|
||||||
|
libraries = self.plex.library.sections()
|
||||||
|
logger.debug(f"Plex: All Libraries {[library.title for library in libraries]}")
|
||||||
|
|
||||||
|
for library in libraries:
|
||||||
|
library_title = library.title
|
||||||
|
library_type = library.type
|
||||||
|
|
||||||
|
if library_type not in ["movie", "show"]:
|
||||||
|
logger.debug(
|
||||||
|
f"Plex: Skipping Library {library_title} found type {library_type}",
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
output[library_title] = library_type
|
||||||
|
|
||||||
|
return output
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Plex: Failed to get libraries, Error: {e}")
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def get_user_library_watched(self, user, user_plex, library) -> LibraryData:
|
||||||
|
user_name: str = user.username.lower() if user.username else user.title.lower()
|
||||||
|
try:
|
||||||
|
logger.info(
|
||||||
|
f"Plex: Generating watched for {user_name} in library {library.title}",
|
||||||
|
)
|
||||||
|
watched = LibraryData(title=library.title)
|
||||||
|
|
||||||
|
library_videos = user_plex.library.section(library.title)
|
||||||
|
|
||||||
|
if library.type == "movie":
|
||||||
|
for video in library_videos.search(
|
||||||
|
unwatched=False
|
||||||
|
) + library_videos.search(inProgress=True):
|
||||||
|
if video.isWatched or video.viewOffset >= 60000:
|
||||||
|
watched.movies.append(get_mediaitem(video, video.isWatched))
|
||||||
|
|
||||||
|
elif library.type == "show":
|
||||||
|
# Keep track of processed shows to reduce duplicate shows
|
||||||
|
processed_shows = []
|
||||||
|
for show in library_videos.search(
|
||||||
|
unwatched=False
|
||||||
|
) + library_videos.search(inProgress=True):
|
||||||
|
if show.key in processed_shows:
|
||||||
|
continue
|
||||||
|
processed_shows.append(show.key)
|
||||||
|
show_guids = extract_guids_from_item(show)
|
||||||
|
episode_mediaitem = []
|
||||||
|
|
||||||
|
# Fetch watched or partially watched episodes
|
||||||
|
for episode in show.watched() + show.episodes(
|
||||||
|
viewOffset__gte=60_000
|
||||||
|
):
|
||||||
|
episode_mediaitem.append(
|
||||||
|
get_mediaitem(episode, episode.isWatched)
|
||||||
|
)
|
||||||
|
|
||||||
|
if episode_mediaitem:
|
||||||
|
watched.series.append(
|
||||||
|
Series(
|
||||||
|
identifiers=MediaIdentifiers(
|
||||||
|
title=show.title,
|
||||||
|
locations=(
|
||||||
|
tuple(
|
||||||
|
[
|
||||||
|
location.split("/")[-1]
|
||||||
|
for location in show.locations
|
||||||
|
]
|
||||||
|
)
|
||||||
|
if generate_locations
|
||||||
|
else tuple()
|
||||||
|
),
|
||||||
|
imdb_id=show_guids.get("imdb", None),
|
||||||
|
tvdb_id=show_guids.get("tvdb", None),
|
||||||
|
tmdb_id=show_guids.get("tmdb", None),
|
||||||
|
),
|
||||||
|
episodes=episode_mediaitem,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return watched
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
|
||||||
|
)
|
||||||
|
return LibraryData(title=library.title)
|
||||||
|
|
||||||
|
def get_watched(self, users, sync_libraries) -> dict[str, UserData]:
|
||||||
|
try:
|
||||||
|
users_watched: dict[str, UserData] = {}
|
||||||
|
|
||||||
for user in users:
|
for user in users:
|
||||||
if self.admin_user == user:
|
if self.admin_user == user:
|
||||||
@@ -494,61 +383,46 @@ class Plex:
|
|||||||
token,
|
token,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger(
|
logger.error(
|
||||||
f"Plex: Failed to get token for {user.title}, skipping",
|
f"Plex: Failed to get token for {user.title}, skipping",
|
||||||
2,
|
|
||||||
)
|
)
|
||||||
users_watched[user.title] = {}
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
libraries = user_plex.library.sections()
|
libraries = user_plex.library.sections()
|
||||||
|
|
||||||
for library in libraries:
|
for library in libraries:
|
||||||
library_title = library.title
|
if library.title not in sync_libraries:
|
||||||
library_type = library.type
|
|
||||||
|
|
||||||
skip_reason = check_skip_logic(
|
|
||||||
library_title,
|
|
||||||
library_type,
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
library_mapping,
|
|
||||||
)
|
|
||||||
|
|
||||||
if skip_reason:
|
|
||||||
logger(
|
|
||||||
f"Plex: Skipping library {library_title}: {skip_reason}", 1
|
|
||||||
)
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
user_watched = get_user_library_watched(user, user_plex, library)
|
library_data = self.get_user_library_watched(
|
||||||
|
user, user_plex, library
|
||||||
|
)
|
||||||
|
|
||||||
for user_watched, user_watched_temp in user_watched.items():
|
if user.title.lower() not in users_watched:
|
||||||
if user_watched not in users_watched:
|
users_watched[user.title.lower()] = UserData()
|
||||||
users_watched[user_watched] = {}
|
|
||||||
users_watched[user_watched].update(user_watched_temp)
|
users_watched[user.title.lower()].libraries[library.title] = (
|
||||||
|
library_data
|
||||||
|
)
|
||||||
|
|
||||||
return users_watched
|
return users_watched
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger(f"Plex: Failed to get watched, Error: {e}", 2)
|
logger.error(f"Plex: Failed to get watched, Error: {e}")
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|
||||||
def update_watched(
|
def update_watched(
|
||||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
self,
|
||||||
|
watched_list: dict[str, UserData],
|
||||||
|
user_mapping=None,
|
||||||
|
library_mapping=None,
|
||||||
|
dryrun=False,
|
||||||
):
|
):
|
||||||
try:
|
try:
|
||||||
args = []
|
for user, user_data in watched_list.items():
|
||||||
|
|
||||||
for user, libraries in watched_list.items():
|
|
||||||
user_other = None
|
user_other = None
|
||||||
# If type of user is dict
|
# If type of user is dict
|
||||||
if user_mapping:
|
if user_mapping:
|
||||||
if user in user_mapping.keys():
|
user_other = search_mapping(user_mapping, user)
|
||||||
user_other = user_mapping[user]
|
|
||||||
elif user in user_mapping.values():
|
|
||||||
user_other = search_mapping(user_mapping, user)
|
|
||||||
|
|
||||||
for index, value in enumerate(self.users):
|
for index, value in enumerate(self.users):
|
||||||
username_title = (
|
username_title = (
|
||||||
@@ -568,9 +442,8 @@ class Plex:
|
|||||||
user_plex = self.plex
|
user_plex = self.plex
|
||||||
else:
|
else:
|
||||||
if isinstance(user, str):
|
if isinstance(user, str):
|
||||||
logger(
|
logger.warning(
|
||||||
f"Plex: {user} is not a plex object, attempting to get object for user",
|
f"Plex: {user} is not a plex object, attempting to get object for user",
|
||||||
4,
|
|
||||||
)
|
)
|
||||||
user = self.plex.myPlexAccount().user(user)
|
user = self.plex.myPlexAccount().user(user)
|
||||||
|
|
||||||
@@ -582,57 +455,48 @@ class Plex:
|
|||||||
session=self.session,
|
session=self.session,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
logger(
|
logger.error(
|
||||||
f"Plex: Failed to get token for {user.title}, skipping",
|
f"Plex: Failed to get token for {user.title}, skipping",
|
||||||
2,
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
for library, videos in libraries.items():
|
for library_name in user_data.libraries:
|
||||||
|
library_data = user_data.libraries[library_name]
|
||||||
library_other = None
|
library_other = None
|
||||||
if library_mapping:
|
if library_mapping:
|
||||||
if library in library_mapping.keys():
|
library_other = search_mapping(library_mapping, library_name)
|
||||||
library_other = library_mapping[library]
|
|
||||||
elif library in library_mapping.values():
|
|
||||||
library_other = search_mapping(library_mapping, library)
|
|
||||||
|
|
||||||
# if library in plex library list
|
# if library in plex library list
|
||||||
library_list = user_plex.library.sections()
|
library_list = user_plex.library.sections()
|
||||||
if library.lower() not in [x.title.lower() for x in library_list]:
|
if library_name.lower() not in [
|
||||||
|
x.title.lower() for x in library_list
|
||||||
|
]:
|
||||||
if library_other:
|
if library_other:
|
||||||
if library_other.lower() in [
|
if library_other.lower() in [
|
||||||
x.title.lower() for x in library_list
|
x.title.lower() for x in library_list
|
||||||
]:
|
]:
|
||||||
logger(
|
logger.info(
|
||||||
f"Plex: Library {library} not found, but {library_other} found, using {library_other}",
|
f"Plex: Library {library_name} not found, but {library_other} found, using {library_other}",
|
||||||
1,
|
|
||||||
)
|
)
|
||||||
library = library_other
|
library_name = library_other
|
||||||
else:
|
else:
|
||||||
logger(
|
logger.info(
|
||||||
f"Plex: Library {library} or {library_other} not found in library list",
|
f"Plex: Library {library_name} or {library_other} not found in library list",
|
||||||
1,
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
logger(
|
logger.info(
|
||||||
f"Plex: Library {library} not found in library list",
|
f"Plex: Library {library_name} not found in library list",
|
||||||
1,
|
|
||||||
)
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
args.append(
|
update_user_watched(
|
||||||
[
|
user,
|
||||||
update_user_watched,
|
user_plex,
|
||||||
user,
|
library_data,
|
||||||
user_plex,
|
library_name,
|
||||||
library,
|
dryrun,
|
||||||
videos,
|
|
||||||
dryrun,
|
|
||||||
]
|
|
||||||
)
|
)
|
||||||
|
|
||||||
future_thread_executor(args)
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logger(f"Plex: Failed to update watched, Error: {e}", 2)
|
logger.error(f"Plex: Failed to update watched, Error: {e}")
|
||||||
raise Exception(e)
|
raise Exception(e)
|
||||||
|
|||||||
116
src/users.py
116
src/users.py
@@ -1,30 +1,35 @@
|
|||||||
from src.functions import (
|
from plexapi.myplex import MyPlexAccount
|
||||||
logger,
|
from loguru import logger
|
||||||
search_mapping,
|
|
||||||
)
|
from src.emby import Emby
|
||||||
|
from src.jellyfin import Jellyfin
|
||||||
|
from src.plex import Plex
|
||||||
|
from src.functions import search_mapping
|
||||||
|
|
||||||
|
|
||||||
def generate_user_list(server):
|
def generate_user_list(server: Plex | Jellyfin | Emby) -> list[str]:
|
||||||
# generate list of users from server 1 and server 2
|
# generate list of users from server 1 and server 2
|
||||||
server_type = server[0]
|
|
||||||
server_connection = server[1]
|
|
||||||
|
|
||||||
server_users = []
|
server_users: list[str] = []
|
||||||
if server_type == "plex":
|
if isinstance(server, Plex):
|
||||||
for user in server_connection.users:
|
for user in server.users:
|
||||||
server_users.append(
|
server_users.append(
|
||||||
user.username.lower() if user.username else user.title.lower()
|
user.username.lower() if user.username else user.title.lower()
|
||||||
)
|
)
|
||||||
|
|
||||||
elif server_type == "jellyfin":
|
elif isinstance(server, (Jellyfin, Emby)):
|
||||||
server_users = [key.lower() for key in server_connection.users.keys()]
|
server_users = [key.lower() for key in server.users.keys()]
|
||||||
|
|
||||||
return server_users
|
return server_users
|
||||||
|
|
||||||
|
|
||||||
def combine_user_lists(server_1_users, server_2_users, user_mapping):
|
def combine_user_lists(
|
||||||
|
server_1_users: list[str],
|
||||||
|
server_2_users: list[str],
|
||||||
|
user_mapping: dict[str, str] | None,
|
||||||
|
) -> dict[str, str]:
|
||||||
# combined list of overlapping users from plex and jellyfin
|
# combined list of overlapping users from plex and jellyfin
|
||||||
users = {}
|
users: dict[str, str] = {}
|
||||||
|
|
||||||
for server_1_user in server_1_users:
|
for server_1_user in server_1_users:
|
||||||
if user_mapping:
|
if user_mapping:
|
||||||
@@ -49,13 +54,15 @@ def combine_user_lists(server_1_users, server_2_users, user_mapping):
|
|||||||
return users
|
return users
|
||||||
|
|
||||||
|
|
||||||
def filter_user_lists(users, blacklist_users, whitelist_users):
|
def filter_user_lists(
|
||||||
users_filtered = {}
|
users: dict[str, str], blacklist_users: list[str], whitelist_users: list[str]
|
||||||
|
) -> dict[str, str]:
|
||||||
|
users_filtered: dict[str, str] = {}
|
||||||
for user in users:
|
for user in users:
|
||||||
# whitelist_user is not empty and user lowercase is not in whitelist lowercase
|
# whitelist_user is not empty and user lowercase is not in whitelist lowercase
|
||||||
if len(whitelist_users) > 0:
|
if len(whitelist_users) > 0:
|
||||||
if user not in whitelist_users and users[user] not in whitelist_users:
|
if user not in whitelist_users and users[user] not in whitelist_users:
|
||||||
logger(f"{user} or {users[user]} is not in whitelist", 1)
|
logger.info(f"{user} or {users[user]} is not in whitelist")
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if user not in blacklist_users and users[user] not in blacklist_users:
|
if user not in blacklist_users and users[user] not in blacklist_users:
|
||||||
@@ -64,12 +71,13 @@ def filter_user_lists(users, blacklist_users, whitelist_users):
|
|||||||
return users_filtered
|
return users_filtered
|
||||||
|
|
||||||
|
|
||||||
def generate_server_users(server, users):
|
def generate_server_users(
|
||||||
server_users = None
|
server: Plex | Jellyfin | Emby,
|
||||||
|
users: dict[str, str],
|
||||||
if server[0] == "plex":
|
) -> list[MyPlexAccount] | dict[str, str] | None:
|
||||||
server_users = []
|
if isinstance(server, Plex):
|
||||||
for plex_user in server[1].users:
|
plex_server_users: list[MyPlexAccount] = []
|
||||||
|
for plex_user in server.users:
|
||||||
username_title = (
|
username_title = (
|
||||||
plex_user.username if plex_user.username else plex_user.title
|
plex_user.username if plex_user.username else plex_user.title
|
||||||
)
|
)
|
||||||
@@ -78,14 +86,64 @@ def generate_server_users(server, users):
|
|||||||
username_title.lower() in users.keys()
|
username_title.lower() in users.keys()
|
||||||
or username_title.lower() in users.values()
|
or username_title.lower() in users.values()
|
||||||
):
|
):
|
||||||
server_users.append(plex_user)
|
plex_server_users.append(plex_user)
|
||||||
elif server[0] == "jellyfin":
|
|
||||||
server_users = {}
|
return plex_server_users
|
||||||
for jellyfin_user, jellyfin_id in server[1].users.items():
|
elif isinstance(server, (Jellyfin, Emby)):
|
||||||
|
jelly_emby_server_users: dict[str, str] = {}
|
||||||
|
for jellyfin_user, jellyfin_id in server.users.items():
|
||||||
if (
|
if (
|
||||||
jellyfin_user.lower() in users.keys()
|
jellyfin_user.lower() in users.keys()
|
||||||
or jellyfin_user.lower() in users.values()
|
or jellyfin_user.lower() in users.values()
|
||||||
):
|
):
|
||||||
server_users[jellyfin_user] = jellyfin_id
|
jelly_emby_server_users[jellyfin_user] = jellyfin_id
|
||||||
|
|
||||||
return server_users
|
return jelly_emby_server_users
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def setup_users(
|
||||||
|
server_1: Plex | Jellyfin | Emby,
|
||||||
|
server_2: Plex | Jellyfin | Emby,
|
||||||
|
blacklist_users: list[str],
|
||||||
|
whitelist_users: list[str],
|
||||||
|
user_mapping: dict[str, str] | None = None,
|
||||||
|
) -> tuple[list[MyPlexAccount] | dict[str, str], list[MyPlexAccount] | dict[str, str]]:
|
||||||
|
server_1_users = generate_user_list(server_1)
|
||||||
|
server_2_users = generate_user_list(server_2)
|
||||||
|
logger.debug(f"Server 1 users: {server_1_users}")
|
||||||
|
logger.debug(f"Server 2 users: {server_2_users}")
|
||||||
|
|
||||||
|
users = combine_user_lists(server_1_users, server_2_users, user_mapping)
|
||||||
|
logger.debug(f"User list that exist on both servers {users}")
|
||||||
|
|
||||||
|
users_filtered = filter_user_lists(users, blacklist_users, whitelist_users)
|
||||||
|
logger.debug(f"Filtered user list {users_filtered}")
|
||||||
|
|
||||||
|
output_server_1_users = generate_server_users(server_1, users_filtered)
|
||||||
|
output_server_2_users = generate_server_users(server_2, users_filtered)
|
||||||
|
|
||||||
|
# Check if users is none or empty
|
||||||
|
if output_server_1_users is None or len(output_server_1_users) == 0:
|
||||||
|
logger.warning(
|
||||||
|
f"No users found for server 1 {type(server_1)}, users: {server_1_users}, overlapping users {users}, filtered users {users_filtered}, server 1 users {server_1.users}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if output_server_2_users is None or len(output_server_2_users) == 0:
|
||||||
|
logger.warning(
|
||||||
|
f"No users found for server 2 {type(server_2)}, users: {server_2_users}, overlapping users {users} filtered users {users_filtered}, server 2 users {server_2.users}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
output_server_1_users is None
|
||||||
|
or len(output_server_1_users) == 0
|
||||||
|
or output_server_2_users is None
|
||||||
|
or len(output_server_2_users) == 0
|
||||||
|
):
|
||||||
|
raise Exception("No users found for one or both servers")
|
||||||
|
|
||||||
|
logger.info(f"Server 1 users: {output_server_1_users}")
|
||||||
|
logger.info(f"Server 2 users: {output_server_2_users}")
|
||||||
|
|
||||||
|
return output_server_1_users, output_server_2_users
|
||||||
|
|||||||
432
src/watched.py
432
src/watched.py
@@ -1,82 +1,111 @@
|
|||||||
import copy
|
import copy
|
||||||
|
from pydantic import BaseModel
|
||||||
|
from loguru import logger
|
||||||
|
|
||||||
from src.functions import logger, search_mapping, contains_nested
|
from src.functions import search_mapping
|
||||||
|
|
||||||
from src.library import generate_library_guids_dict
|
|
||||||
|
|
||||||
|
|
||||||
def combine_watched_dicts(dicts: list):
|
class MediaIdentifiers(BaseModel):
|
||||||
# Ensure that the input is a list of dictionaries
|
title: str
|
||||||
if not all(isinstance(d, dict) for d in dicts):
|
|
||||||
raise ValueError("Input must be a list of dictionaries")
|
|
||||||
|
|
||||||
combined_dict = {}
|
# File information, will be folder for series and media file for episode/movie
|
||||||
|
locations: tuple[str, ...] = tuple()
|
||||||
|
|
||||||
for single_dict in dicts:
|
# Guids
|
||||||
for key, value in single_dict.items():
|
imdb_id: str | None = None
|
||||||
if key not in combined_dict:
|
tvdb_id: str | None = None
|
||||||
combined_dict[key] = {}
|
tmdb_id: str | None = None
|
||||||
|
|
||||||
for subkey, subvalue in value.items():
|
|
||||||
if subkey in combined_dict[key]:
|
|
||||||
# If the subkey already exists in the combined dictionary,
|
|
||||||
# check if the values are different and raise an exception if they are
|
|
||||||
if combined_dict[key][subkey] != subvalue:
|
|
||||||
raise ValueError(
|
|
||||||
f"Conflicting values for subkey '{subkey}' under key '{key}'"
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# If the subkey does not exist in the combined dictionary, add it
|
|
||||||
combined_dict[key][subkey] = subvalue
|
|
||||||
|
|
||||||
return combined_dict
|
|
||||||
|
|
||||||
|
|
||||||
def check_remove_entry(video, library, video_index, library_watched_list_2):
|
class WatchedStatus(BaseModel):
|
||||||
if video_index is not None:
|
completed: bool
|
||||||
if (
|
time: int
|
||||||
library_watched_list_2["completed"][video_index]
|
|
||||||
== video["status"]["completed"]
|
|
||||||
) and (library_watched_list_2["time"][video_index] == video["status"]["time"]):
|
class MediaItem(BaseModel):
|
||||||
logger(
|
identifiers: MediaIdentifiers
|
||||||
f"Removing {video['title']} from {library} due to exact match",
|
status: WatchedStatus
|
||||||
3,
|
|
||||||
)
|
|
||||||
return True
|
class Series(BaseModel):
|
||||||
elif (
|
identifiers: MediaIdentifiers
|
||||||
library_watched_list_2["completed"][video_index] == True
|
episodes: list[MediaItem] = []
|
||||||
and video["status"]["completed"] == False
|
|
||||||
):
|
|
||||||
logger(
|
class LibraryData(BaseModel):
|
||||||
f"Removing {video['title']} from {library} due to being complete in one library and not the other",
|
title: str
|
||||||
3,
|
movies: list[MediaItem] = []
|
||||||
)
|
series: list[Series] = []
|
||||||
return True
|
|
||||||
elif (
|
|
||||||
library_watched_list_2["completed"][video_index] == False
|
class UserData(BaseModel):
|
||||||
and video["status"]["completed"] == False
|
libraries: dict[str, LibraryData] = {}
|
||||||
) and (video["status"]["time"] < library_watched_list_2["time"][video_index]):
|
|
||||||
logger(
|
|
||||||
f"Removing {video['title']} from {library} due to more time watched in one library than the other",
|
def check_same_identifiers(item1: MediaIdentifiers, item2: MediaIdentifiers) -> bool:
|
||||||
3,
|
# Check for duplicate based on file locations:
|
||||||
)
|
if item1.locations and item2.locations:
|
||||||
return True
|
if set(item1.locations) & set(item2.locations):
|
||||||
elif (
|
|
||||||
library_watched_list_2["completed"][video_index] == True
|
|
||||||
and video["status"]["completed"] == True
|
|
||||||
):
|
|
||||||
logger(
|
|
||||||
f"Removing {video['title']} from {library} due to being complete in both libraries",
|
|
||||||
3,
|
|
||||||
)
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
# Check for duplicate based on GUIDs:
|
||||||
|
if (
|
||||||
|
(item1.imdb_id and item2.imdb_id and item1.imdb_id == item2.imdb_id)
|
||||||
|
or (item1.tvdb_id and item2.tvdb_id and item1.tvdb_id == item2.tvdb_id)
|
||||||
|
or (item1.tmdb_id and item2.tmdb_id and item1.tmdb_id == item2.tmdb_id)
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def check_remove_entry(item1: MediaItem, item2: MediaItem) -> bool:
|
||||||
|
"""
|
||||||
|
Returns True if item1 (from watched_list_1) should be removed
|
||||||
|
in favor of item2 (from watched_list_2), based on:
|
||||||
|
- Duplicate criteria:
|
||||||
|
* They match if any file location is shared OR
|
||||||
|
at least one of imdb_id, tvdb_id, or tmdb_id matches.
|
||||||
|
- Watched status:
|
||||||
|
* If one is complete and the other is not, remove the incomplete one.
|
||||||
|
* If both are incomplete, remove the one with lower progress (time).
|
||||||
|
* If both are complete, remove item1 as duplicate.
|
||||||
|
"""
|
||||||
|
if not check_same_identifiers(item1.identifiers, item2.identifiers):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Compare watched statuses.
|
||||||
|
status1 = item1.status
|
||||||
|
status2 = item2.status
|
||||||
|
|
||||||
|
# If one is complete and the other isn't, remove the one that's not complete.
|
||||||
|
if status1.completed != status2.completed:
|
||||||
|
if not status1.completed and status2.completed:
|
||||||
|
return True # Remove item1 since it's not complete.
|
||||||
|
else:
|
||||||
|
return False # Do not remove item1; it's complete.
|
||||||
|
|
||||||
|
# Both have the same completed status.
|
||||||
|
if not status1.completed and not status2.completed:
|
||||||
|
# Both incomplete: remove the one with lower progress (time)
|
||||||
|
if status1.time < status2.time:
|
||||||
|
return True # Remove item1 because it has watched less.
|
||||||
|
elif status1.time > status2.time:
|
||||||
|
return False # Keep item1 because it has more progress.
|
||||||
|
else:
|
||||||
|
# Same progress; Remove duplicate
|
||||||
|
return True
|
||||||
|
|
||||||
|
# If both are complete, consider item1 the duplicate and remove it.
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
def cleanup_watched(
|
def cleanup_watched(
|
||||||
watched_list_1, watched_list_2, user_mapping=None, library_mapping=None
|
watched_list_1: dict[str, UserData],
|
||||||
):
|
watched_list_2: dict[str, UserData],
|
||||||
|
user_mapping=None,
|
||||||
|
library_mapping=None,
|
||||||
|
) -> dict[str, UserData]:
|
||||||
modified_watched_list_1 = copy.deepcopy(watched_list_1)
|
modified_watched_list_1 = copy.deepcopy(watched_list_1)
|
||||||
|
|
||||||
# remove entries from watched_list_1 that are in watched_list_2
|
# remove entries from watched_list_1 that are in watched_list_2
|
||||||
@@ -88,110 +117,84 @@ def cleanup_watched(
|
|||||||
if user_2 is None:
|
if user_2 is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
for library_1 in watched_list_1[user_1]:
|
for library_1_key in watched_list_1[user_1].libraries:
|
||||||
library_other = None
|
library_other = None
|
||||||
if library_mapping:
|
if library_mapping:
|
||||||
library_other = search_mapping(library_mapping, library_1)
|
library_other = search_mapping(library_mapping, library_1_key)
|
||||||
library_2 = get_other(watched_list_2[user_2], library_1, library_other)
|
library_2_key = get_other(
|
||||||
if library_2 is None:
|
watched_list_2[user_2].libraries, library_1_key, library_other
|
||||||
|
)
|
||||||
|
if library_2_key is None:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
(
|
library_1 = watched_list_1[user_1].libraries[library_1_key]
|
||||||
_,
|
library_2 = watched_list_2[user_2].libraries[library_2_key]
|
||||||
episode_watched_list_2_keys_dict,
|
|
||||||
movies_watched_list_2_keys_dict,
|
|
||||||
) = generate_library_guids_dict(watched_list_2[user_2][library_2])
|
|
||||||
|
|
||||||
# Movies
|
filtered_movies = []
|
||||||
if isinstance(watched_list_1[user_1][library_1], list):
|
for movie in library_1.movies:
|
||||||
for movie in watched_list_1[user_1][library_1]:
|
remove_flag = False
|
||||||
movie_index = get_movie_index_in_dict(
|
for movie2 in library_2.movies:
|
||||||
movie, movies_watched_list_2_keys_dict
|
if check_remove_entry(movie, movie2):
|
||||||
)
|
logger.trace(f"Removing movie: {movie.identifiers.title}")
|
||||||
if movie_index is not None:
|
remove_flag = True
|
||||||
if check_remove_entry(
|
break
|
||||||
movie,
|
|
||||||
library_1,
|
if not remove_flag:
|
||||||
movie_index,
|
filtered_movies.append(movie)
|
||||||
movies_watched_list_2_keys_dict,
|
|
||||||
):
|
modified_watched_list_1[user_1].libraries[
|
||||||
modified_watched_list_1[user_1][library_1].remove(movie)
|
library_1_key
|
||||||
|
].movies = filtered_movies
|
||||||
|
|
||||||
# TV Shows
|
# TV Shows
|
||||||
elif isinstance(watched_list_1[user_1][library_1], dict):
|
filtered_series_list = []
|
||||||
for show_key_1 in watched_list_1[user_1][library_1].keys():
|
for series1 in library_1.series:
|
||||||
show_key_dict = dict(show_key_1)
|
matching_series = None
|
||||||
|
for series2 in library_2.series:
|
||||||
|
if check_same_identifiers(series1.identifiers, series2.identifiers):
|
||||||
|
matching_series = series2
|
||||||
|
break
|
||||||
|
|
||||||
for season in watched_list_1[user_1][library_1][show_key_1]:
|
if matching_series is None:
|
||||||
# Filter the episode_watched_list_2_keys_dict dictionary to handle cases
|
# No matching show in watched_list_2; keep the series as is.
|
||||||
# where episode location names are not unique such as S01E01.mkv
|
filtered_series_list.append(series1)
|
||||||
filtered_episode_watched_list_2_keys_dict = (
|
else:
|
||||||
filter_episode_watched_list_2_keys_dict(
|
# We have a matching show; now clean up the episodes.
|
||||||
episode_watched_list_2_keys_dict, show_key_dict, season
|
filtered_episodes = []
|
||||||
)
|
for ep1 in series1.episodes:
|
||||||
)
|
remove_flag = False
|
||||||
for episode in watched_list_1[user_1][library_1][show_key_1][
|
for ep2 in matching_series.episodes:
|
||||||
season
|
if check_remove_entry(ep1, ep2):
|
||||||
]:
|
logger.trace(
|
||||||
episode_index = get_episode_index_in_dict(
|
f"Removing episode '{ep1.identifiers.title}' from show '{series1.identifiers.title}'",
|
||||||
episode, filtered_episode_watched_list_2_keys_dict
|
|
||||||
)
|
|
||||||
if episode_index is not None:
|
|
||||||
if check_remove_entry(
|
|
||||||
episode,
|
|
||||||
library_1,
|
|
||||||
episode_index,
|
|
||||||
episode_watched_list_2_keys_dict,
|
|
||||||
):
|
|
||||||
modified_watched_list_1[user_1][library_1][
|
|
||||||
show_key_1
|
|
||||||
][season].remove(episode)
|
|
||||||
|
|
||||||
# Remove empty seasons
|
|
||||||
if (
|
|
||||||
len(
|
|
||||||
modified_watched_list_1[user_1][library_1][show_key_1][
|
|
||||||
season
|
|
||||||
]
|
|
||||||
)
|
|
||||||
== 0
|
|
||||||
):
|
|
||||||
if (
|
|
||||||
season
|
|
||||||
in modified_watched_list_1[user_1][library_1][
|
|
||||||
show_key_1
|
|
||||||
]
|
|
||||||
):
|
|
||||||
logger(
|
|
||||||
f"Removing {season} from {show_key_dict['title']} because it is empty",
|
|
||||||
3,
|
|
||||||
)
|
)
|
||||||
del modified_watched_list_1[user_1][library_1][
|
remove_flag = True
|
||||||
show_key_1
|
break
|
||||||
][season]
|
if not remove_flag:
|
||||||
|
filtered_episodes.append(ep1)
|
||||||
|
|
||||||
# Remove empty shows
|
# Only keep the series if there are remaining episodes.
|
||||||
if len(modified_watched_list_1[user_1][library_1][show_key_1]) == 0:
|
if filtered_episodes:
|
||||||
if show_key_1 in modified_watched_list_1[user_1][library_1]:
|
modified_series1 = copy.deepcopy(series1)
|
||||||
logger(
|
modified_series1.episodes = filtered_episodes
|
||||||
f"Removing {show_key_dict['title']} because it is empty",
|
filtered_series_list.append(modified_series1)
|
||||||
3,
|
else:
|
||||||
)
|
logger.trace(
|
||||||
del modified_watched_list_1[user_1][library_1][show_key_1]
|
f"Removing entire show '{series1.identifiers.title}' as no episodes remain after cleanup.",
|
||||||
|
)
|
||||||
|
modified_watched_list_1[user_1].libraries[
|
||||||
|
library_1_key
|
||||||
|
].series = filtered_series_list
|
||||||
|
|
||||||
for user_1 in watched_list_1:
|
# After processing, remove any library that is completely empty.
|
||||||
for library_1 in watched_list_1[user_1]:
|
for user, user_data in modified_watched_list_1.items():
|
||||||
if library_1 in modified_watched_list_1[user_1]:
|
new_libraries = {}
|
||||||
# If library is empty then remove it
|
for lib_key, library in user_data.libraries.items():
|
||||||
if len(modified_watched_list_1[user_1][library_1]) == 0:
|
if library.movies or library.series:
|
||||||
logger(f"Removing {library_1} from {user_1} because it is empty", 1)
|
new_libraries[lib_key] = library
|
||||||
del modified_watched_list_1[user_1][library_1]
|
else:
|
||||||
|
logger.trace(f"Removing empty library '{lib_key}' for user '{user}'")
|
||||||
if user_1 in modified_watched_list_1:
|
user_data.libraries = new_libraries
|
||||||
# If user is empty delete user
|
|
||||||
if len(modified_watched_list_1[user_1]) == 0:
|
|
||||||
logger(f"Removing {user_1} from watched list 1 because it is empty", 1)
|
|
||||||
del modified_watched_list_1[user_1]
|
|
||||||
|
|
||||||
return modified_watched_list_1
|
return modified_watched_list_1
|
||||||
|
|
||||||
@@ -202,116 +205,5 @@ def get_other(watched_list, object_1, object_2):
|
|||||||
elif object_2 in watched_list:
|
elif object_2 in watched_list:
|
||||||
return object_2
|
return object_2
|
||||||
else:
|
else:
|
||||||
logger(f"{object_1} and {object_2} not found in watched list 2", 1)
|
logger.info(f"{object_1} and {object_2} not found in watched list 2")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_movie_index_in_dict(movie, movies_watched_list_2_keys_dict):
|
|
||||||
# Iterate through the keys and values of the movie dictionary
|
|
||||||
for movie_key, movie_value in movie.items():
|
|
||||||
# If the key is "locations", check if the "locations" key is present in the movies_watched_list_2_keys_dict dictionary
|
|
||||||
if movie_key == "locations":
|
|
||||||
if "locations" in movies_watched_list_2_keys_dict.keys():
|
|
||||||
# Iterate through the locations in the movie dictionary
|
|
||||||
for location in movie_value:
|
|
||||||
# If the location is in the movies_watched_list_2_keys_dict dictionary, return index of the key
|
|
||||||
return contains_nested(
|
|
||||||
location, movies_watched_list_2_keys_dict["locations"]
|
|
||||||
)
|
|
||||||
|
|
||||||
# If the key is not "locations", check if the movie_key is present in the movies_watched_list_2_keys_dict dictionary
|
|
||||||
else:
|
|
||||||
if movie_key in movies_watched_list_2_keys_dict.keys():
|
|
||||||
# If the movie_value is in the movies_watched_list_2_keys_dict dictionary, return True
|
|
||||||
if movie_value in movies_watched_list_2_keys_dict[movie_key]:
|
|
||||||
return movies_watched_list_2_keys_dict[movie_key].index(movie_value)
|
|
||||||
|
|
||||||
# If the loop completes without finding a match, return False
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def filter_episode_watched_list_2_keys_dict(
|
|
||||||
episode_watched_list_2_keys_dict, show_key_dict, season
|
|
||||||
):
|
|
||||||
# If the episode_watched_list_2_keys_dict dictionary is empty, missing season or show then return an empty dictionary
|
|
||||||
if (
|
|
||||||
len(episode_watched_list_2_keys_dict) == 0
|
|
||||||
or "season" not in episode_watched_list_2_keys_dict.keys()
|
|
||||||
or "show" not in episode_watched_list_2_keys_dict.keys()
|
|
||||||
):
|
|
||||||
return {}
|
|
||||||
|
|
||||||
# Filter the episode_watched_list_2_keys_dict dictionary to only include values for the correct show and season
|
|
||||||
filtered_episode_watched_list_2_keys_dict = {}
|
|
||||||
show_indecies = []
|
|
||||||
season_indecies = []
|
|
||||||
|
|
||||||
# Iterate through episode_watched_list_2_keys_dict["season"] and find the indecies that match season
|
|
||||||
for season_index, season_value in enumerate(
|
|
||||||
episode_watched_list_2_keys_dict.get("season")
|
|
||||||
):
|
|
||||||
if season_value == season:
|
|
||||||
season_indecies.append(season_index)
|
|
||||||
|
|
||||||
# Iterate through episode_watched_list_2_keys_dict["show"] and find the indecies that match show_key_dict
|
|
||||||
for show_index, show_value in enumerate(episode_watched_list_2_keys_dict["show"]):
|
|
||||||
# Iterate through the keys and values of the show_value dictionary and check if they match show_key_dict
|
|
||||||
for show_key, show_key_value in show_value.items():
|
|
||||||
if show_key == "locations":
|
|
||||||
# Iterate through the locations in the show_value dictionary
|
|
||||||
for location in show_key_value:
|
|
||||||
# If the location is in the episode_watched_list_2_keys_dict dictionary, return index of the key
|
|
||||||
if (
|
|
||||||
contains_nested(location, show_key_dict["locations"])
|
|
||||||
is not None
|
|
||||||
):
|
|
||||||
show_indecies.append(show_index)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
if show_key in show_key_dict.keys():
|
|
||||||
if show_key_value == show_key_dict[show_key]:
|
|
||||||
show_indecies.append(show_index)
|
|
||||||
break
|
|
||||||
|
|
||||||
# Find the intersection of the show_indecies and season_indecies lists
|
|
||||||
indecies = list(set(show_indecies) & set(season_indecies))
|
|
||||||
|
|
||||||
# If there are no indecies that match the show and season, return an empty dictionary
|
|
||||||
if len(indecies) == 0:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
# Create a copy of the dictionary with indecies that match the show and season and none that don't
|
|
||||||
for key, value in episode_watched_list_2_keys_dict.items():
|
|
||||||
if key not in filtered_episode_watched_list_2_keys_dict:
|
|
||||||
filtered_episode_watched_list_2_keys_dict[key] = []
|
|
||||||
|
|
||||||
for index, _ in enumerate(value):
|
|
||||||
if index in indecies:
|
|
||||||
filtered_episode_watched_list_2_keys_dict[key].append(value[index])
|
|
||||||
else:
|
|
||||||
filtered_episode_watched_list_2_keys_dict[key].append(None)
|
|
||||||
|
|
||||||
return filtered_episode_watched_list_2_keys_dict
|
|
||||||
|
|
||||||
|
|
||||||
def get_episode_index_in_dict(episode, episode_watched_list_2_keys_dict):
|
|
||||||
# Iterate through the keys and values of the episode dictionary
|
|
||||||
for episode_key, episode_value in episode.items():
|
|
||||||
if episode_key in episode_watched_list_2_keys_dict.keys():
|
|
||||||
if episode_key == "locations":
|
|
||||||
# Iterate through the locations in the episode dictionary
|
|
||||||
for location in episode_value:
|
|
||||||
# If the location is in the episode_watched_list_2_keys_dict dictionary, return index of the key
|
|
||||||
return contains_nested(
|
|
||||||
location, episode_watched_list_2_keys_dict["locations"]
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
# If the episode_value is in the episode_watched_list_2_keys_dict dictionary, return True
|
|
||||||
if episode_value in episode_watched_list_2_keys_dict[episode_key]:
|
|
||||||
return episode_watched_list_2_keys_dict[episode_key].index(
|
|
||||||
episode_value
|
|
||||||
)
|
|
||||||
|
|
||||||
# If the loop completes without finding a match, return False
|
|
||||||
return None
|
|
||||||
|
|||||||
113
test/ci_emby.env
Normal file
113
test/ci_emby.env
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
# Global Settings
|
||||||
|
|
||||||
|
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||||
|
DRYRUN = "True"
|
||||||
|
|
||||||
|
## Debugging level, "info" is default, "debug" is more verbose
|
||||||
|
DEBUG_LEVEL = "trace"
|
||||||
|
|
||||||
|
## If set to true then the script will only run once and then exit
|
||||||
|
RUN_ONLY_ONCE = "True"
|
||||||
|
|
||||||
|
## How often to run the script in seconds
|
||||||
|
SLEEP_DURATION = 10
|
||||||
|
|
||||||
|
## Log file where all output will be written to
|
||||||
|
LOG_FILE = "log.log"
|
||||||
|
|
||||||
|
## Mark file where all shows/movies that have been marked as played will be written to
|
||||||
|
MARK_FILE = "mark.log"
|
||||||
|
|
||||||
|
## Timeout for requests for jellyfin
|
||||||
|
REQUEST_TIMEOUT = 300
|
||||||
|
|
||||||
|
## Max threads for processing
|
||||||
|
MAX_THREADS = 2
|
||||||
|
|
||||||
|
## Generate guids
|
||||||
|
## Generating guids is a slow process, so this is a way to speed up the process
|
||||||
|
# by using the location only, useful when using same files on multiple servers
|
||||||
|
GENERATE_GUIDS = "True"
|
||||||
|
|
||||||
|
## Generate locations
|
||||||
|
## Generating locations is a slow process, so this is a way to speed up the process
|
||||||
|
## by using the guid only, useful when using different files on multiple servers
|
||||||
|
GENERATE_LOCATIONS = "True"
|
||||||
|
|
||||||
|
## Map usernames between servers in the event that they are different, order does not matter
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||||
|
|
||||||
|
## Map libraries between servers in the even that they are different, order does not matter
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||||
|
|
||||||
|
|
||||||
|
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
#BLACKLIST_LIBRARY = ""
|
||||||
|
#WHITELIST_LIBRARY = "Movies"
|
||||||
|
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||||
|
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||||
|
#BLACKLIST_USERS = ""
|
||||||
|
WHITELIST_USERS = "jellyplex_watched"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Plex
|
||||||
|
|
||||||
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
|
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
PLEX_BASEURL = "http://localhost:32400"
|
||||||
|
|
||||||
|
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||||
|
|
||||||
|
## If not using plex token then use username and password of the server admin along with the servername
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||||
|
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||||
|
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||||
|
|
||||||
|
## Skip hostname validation for ssl certificates.
|
||||||
|
## Set to True if running into ssl certificate errors
|
||||||
|
SSL_BYPASS = "True"
|
||||||
|
|
||||||
|
# Jellyfin
|
||||||
|
|
||||||
|
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||||
|
|
||||||
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "False"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "False"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "False"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "False"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "False"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||||
@@ -3,11 +3,8 @@
|
|||||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||||
DRYRUN = "True"
|
DRYRUN = "True"
|
||||||
|
|
||||||
## Additional logging information
|
|
||||||
DEBUG = "True"
|
|
||||||
|
|
||||||
## Debugging level, "info" is default, "debug" is more verbose
|
## Debugging level, "info" is default, "debug" is more verbose
|
||||||
DEBUG_LEVEL = "debug"
|
DEBUG_LEVEL = "trace"
|
||||||
|
|
||||||
## If set to true then the script will only run once and then exit
|
## If set to true then the script will only run once and then exit
|
||||||
RUN_ONLY_ONCE = "True"
|
RUN_ONLY_ONCE = "True"
|
||||||
@@ -62,11 +59,11 @@ WHITELIST_USERS = "jellyplex_watched"
|
|||||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
PLEX_BASEURL = "https://localhost:32400"
|
PLEX_BASEURL = "http://localhost:32400"
|
||||||
|
|
||||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||||
|
|
||||||
## If not using plex token then use username and password of the server admin along with the servername
|
## If not using plex token then use username and password of the server admin along with the servername
|
||||||
## Comma seperated for multiple options
|
## Comma seperated for multiple options
|
||||||
@@ -78,13 +75,6 @@ PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
|||||||
## Set to True if running into ssl certificate errors
|
## Set to True if running into ssl certificate errors
|
||||||
SSL_BYPASS = "True"
|
SSL_BYPASS = "True"
|
||||||
|
|
||||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
|
||||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
|
||||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
|
||||||
|
|
||||||
# Jellyfin
|
# Jellyfin
|
||||||
|
|
||||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
@@ -94,3 +84,30 @@ JELLYFIN_BASEURL = "http://localhost:8096"
|
|||||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||||
113
test/ci_jellyfin.env
Normal file
113
test/ci_jellyfin.env
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
# Global Settings
|
||||||
|
|
||||||
|
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||||
|
DRYRUN = "True"
|
||||||
|
|
||||||
|
## Debugging level, "info" is default, "debug" is more verbose
|
||||||
|
DEBUG_LEVEL = "trace"
|
||||||
|
|
||||||
|
## If set to true then the script will only run once and then exit
|
||||||
|
RUN_ONLY_ONCE = "True"
|
||||||
|
|
||||||
|
## How often to run the script in seconds
|
||||||
|
SLEEP_DURATION = 10
|
||||||
|
|
||||||
|
## Log file where all output will be written to
|
||||||
|
LOG_FILE = "log.log"
|
||||||
|
|
||||||
|
## Mark file where all shows/movies that have been marked as played will be written to
|
||||||
|
MARK_FILE = "mark.log"
|
||||||
|
|
||||||
|
## Timeout for requests for jellyfin
|
||||||
|
REQUEST_TIMEOUT = 300
|
||||||
|
|
||||||
|
## Max threads for processing
|
||||||
|
MAX_THREADS = 2
|
||||||
|
|
||||||
|
## Generate guids
|
||||||
|
## Generating guids is a slow process, so this is a way to speed up the process
|
||||||
|
# by using the location only, useful when using same files on multiple servers
|
||||||
|
GENERATE_GUIDS = "True"
|
||||||
|
|
||||||
|
## Generate locations
|
||||||
|
## Generating locations is a slow process, so this is a way to speed up the process
|
||||||
|
## by using the guid only, useful when using different files on multiple servers
|
||||||
|
GENERATE_LOCATIONS = "True"
|
||||||
|
|
||||||
|
## Map usernames between servers in the event that they are different, order does not matter
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||||
|
|
||||||
|
## Map libraries between servers in the even that they are different, order does not matter
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||||
|
|
||||||
|
|
||||||
|
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
#BLACKLIST_LIBRARY = ""
|
||||||
|
#WHITELIST_LIBRARY = "Movies"
|
||||||
|
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||||
|
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||||
|
#BLACKLIST_USERS = ""
|
||||||
|
WHITELIST_USERS = "jellyplex_watched"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Plex
|
||||||
|
|
||||||
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
|
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
PLEX_BASEURL = "http://localhost:32400"
|
||||||
|
|
||||||
|
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||||
|
|
||||||
|
## If not using plex token then use username and password of the server admin along with the servername
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||||
|
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||||
|
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||||
|
|
||||||
|
## Skip hostname validation for ssl certificates.
|
||||||
|
## Set to True if running into ssl certificate errors
|
||||||
|
SSL_BYPASS = "True"
|
||||||
|
|
||||||
|
# Jellyfin
|
||||||
|
|
||||||
|
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||||
|
|
||||||
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "False"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "False"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "False"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "False"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "False"
|
||||||
@@ -3,11 +3,8 @@
|
|||||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||||
DRYRUN = "True"
|
DRYRUN = "True"
|
||||||
|
|
||||||
## Additional logging information
|
|
||||||
DEBUG = "True"
|
|
||||||
|
|
||||||
## Debugging level, "info" is default, "debug" is more verbose
|
## Debugging level, "info" is default, "debug" is more verbose
|
||||||
DEBUG_LEVEL = "debug"
|
DEBUG_LEVEL = "trace"
|
||||||
|
|
||||||
## If set to true then the script will only run once and then exit
|
## If set to true then the script will only run once and then exit
|
||||||
RUN_ONLY_ONCE = "True"
|
RUN_ONLY_ONCE = "True"
|
||||||
@@ -62,11 +59,11 @@ WHITELIST_USERS = "jellyplex_watched"
|
|||||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
PLEX_BASEURL = "https://localhost:32400"
|
PLEX_BASEURL = "http://localhost:32400"
|
||||||
|
|
||||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||||
|
|
||||||
## If not using plex token then use username and password of the server admin along with the servername
|
## If not using plex token then use username and password of the server admin along with the servername
|
||||||
## Comma seperated for multiple options
|
## Comma seperated for multiple options
|
||||||
@@ -78,13 +75,6 @@ PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
|||||||
## Set to True if running into ssl certificate errors
|
## Set to True if running into ssl certificate errors
|
||||||
SSL_BYPASS = "True"
|
SSL_BYPASS = "True"
|
||||||
|
|
||||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
|
||||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
|
||||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
|
||||||
|
|
||||||
# Jellyfin
|
# Jellyfin
|
||||||
|
|
||||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
@@ -94,3 +84,30 @@ JELLYFIN_BASEURL = "http://localhost:8096"
|
|||||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||||
113
test/ci_plex.env
Normal file
113
test/ci_plex.env
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
# Global Settings
|
||||||
|
|
||||||
|
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||||
|
DRYRUN = "True"
|
||||||
|
|
||||||
|
## Debugging level, "info" is default, "debug" is more verbose
|
||||||
|
DEBUG_LEVEL = "trace"
|
||||||
|
|
||||||
|
## If set to true then the script will only run once and then exit
|
||||||
|
RUN_ONLY_ONCE = "True"
|
||||||
|
|
||||||
|
## How often to run the script in seconds
|
||||||
|
SLEEP_DURATION = 10
|
||||||
|
|
||||||
|
## Log file where all output will be written to
|
||||||
|
LOG_FILE = "log.log"
|
||||||
|
|
||||||
|
## Mark file where all shows/movies that have been marked as played will be written to
|
||||||
|
MARK_FILE = "mark.log"
|
||||||
|
|
||||||
|
## Timeout for requests for jellyfin
|
||||||
|
REQUEST_TIMEOUT = 300
|
||||||
|
|
||||||
|
## Max threads for processing
|
||||||
|
MAX_THREADS = 2
|
||||||
|
|
||||||
|
## Generate guids
|
||||||
|
## Generating guids is a slow process, so this is a way to speed up the process
|
||||||
|
# by using the location only, useful when using same files on multiple servers
|
||||||
|
GENERATE_GUIDS = "True"
|
||||||
|
|
||||||
|
## Generate locations
|
||||||
|
## Generating locations is a slow process, so this is a way to speed up the process
|
||||||
|
## by using the guid only, useful when using different files on multiple servers
|
||||||
|
GENERATE_LOCATIONS = "True"
|
||||||
|
|
||||||
|
## Map usernames between servers in the event that they are different, order does not matter
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
USER_MAPPING = {"JellyUser":"jellyplex_watched"}
|
||||||
|
|
||||||
|
## Map libraries between servers in the even that they are different, order does not matter
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||||
|
|
||||||
|
|
||||||
|
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
#BLACKLIST_LIBRARY = ""
|
||||||
|
#WHITELIST_LIBRARY = "Movies"
|
||||||
|
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||||
|
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||||
|
#BLACKLIST_USERS = ""
|
||||||
|
WHITELIST_USERS = "jellyplex_watched"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Plex
|
||||||
|
|
||||||
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
|
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
PLEX_BASEURL = "http://localhost:32400"
|
||||||
|
|
||||||
|
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||||
|
|
||||||
|
## If not using plex token then use username and password of the server admin along with the servername
|
||||||
|
## Comma seperated for multiple options
|
||||||
|
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||||
|
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||||
|
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||||
|
|
||||||
|
## Skip hostname validation for ssl certificates.
|
||||||
|
## Set to True if running into ssl certificate errors
|
||||||
|
SSL_BYPASS = "True"
|
||||||
|
|
||||||
|
# Jellyfin
|
||||||
|
|
||||||
|
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||||
|
|
||||||
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "False"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "False"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "False"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "False"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "False"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "False"
|
||||||
@@ -3,11 +3,8 @@
|
|||||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||||
DRYRUN = "False"
|
DRYRUN = "False"
|
||||||
|
|
||||||
## Additional logging information
|
|
||||||
DEBUG = "True"
|
|
||||||
|
|
||||||
## Debugging level, "info" is default, "debug" is more verbose
|
## Debugging level, "info" is default, "debug" is more verbose
|
||||||
DEBUG_LEVEL = "debug"
|
DEBUG_LEVEL = "trace"
|
||||||
|
|
||||||
## If set to true then the script will only run once and then exit
|
## If set to true then the script will only run once and then exit
|
||||||
RUN_ONLY_ONCE = "True"
|
RUN_ONLY_ONCE = "True"
|
||||||
@@ -62,11 +59,11 @@ WHITELIST_USERS = "jellyplex_watched"
|
|||||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
PLEX_BASEURL = "https://localhost:32400"
|
PLEX_BASEURL = "http://localhost:32400"
|
||||||
|
|
||||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
PLEX_TOKEN = "6S28yhwKg4y-vAXYMi1c"
|
||||||
|
|
||||||
## If not using plex token then use username and password of the server admin along with the servername
|
## If not using plex token then use username and password of the server admin along with the servername
|
||||||
## Comma seperated for multiple options
|
## Comma seperated for multiple options
|
||||||
@@ -78,13 +75,6 @@ PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
|||||||
## Set to True if running into ssl certificate errors
|
## Set to True if running into ssl certificate errors
|
||||||
SSL_BYPASS = "True"
|
SSL_BYPASS = "True"
|
||||||
|
|
||||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
|
||||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
|
||||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
|
||||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
|
||||||
|
|
||||||
# Jellyfin
|
# Jellyfin
|
||||||
|
|
||||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
@@ -94,3 +84,30 @@ JELLYFIN_BASEURL = "http://localhost:8096"
|
|||||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||||
## Comma seperated list for multiple servers
|
## Comma seperated list for multiple servers
|
||||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||||
|
|
||||||
|
# Emby
|
||||||
|
|
||||||
|
## Emby server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_BASEURL = "http://localhost:8097"
|
||||||
|
|
||||||
|
## Emby api token, created manually by logging in to the Emby server admin dashboard and creating an api key
|
||||||
|
## Comma seperated list for multiple servers
|
||||||
|
EMBY_TOKEN = "ed9507cba8d14d469ae4d58e33afc515"
|
||||||
|
|
||||||
|
|
||||||
|
# Syncing Options
|
||||||
|
|
||||||
|
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||||
|
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||||
|
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_PLEX_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_JELLYFIN_TO_EMBY = "True"
|
||||||
|
|
||||||
|
SYNC_FROM_EMBY_TO_PLEX = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_JELLYFIN = "True"
|
||||||
|
SYNC_FROM_EMBY_TO_EMBY = "True"
|
||||||
@@ -18,12 +18,12 @@ from src.black_white import setup_black_white_lists
|
|||||||
|
|
||||||
def test_setup_black_white_lists():
|
def test_setup_black_white_lists():
|
||||||
# Simple
|
# Simple
|
||||||
blacklist_library = "library1, library2"
|
blacklist_library = ["library1", "library2"]
|
||||||
whitelist_library = "library1, library2"
|
whitelist_library = ["library1", "library2"]
|
||||||
blacklist_library_type = "library_type1, library_type2"
|
blacklist_library_type = ["library_type1", "library_type2"]
|
||||||
whitelist_library_type = "library_type1, library_type2"
|
whitelist_library_type = ["library_type1", "library_type2"]
|
||||||
blacklist_users = "user1, user2"
|
blacklist_users = ["user1", "user2"]
|
||||||
whitelist_users = "user1, user2"
|
whitelist_users = ["user1", "user2"]
|
||||||
|
|
||||||
(
|
(
|
||||||
results_blacklist_library,
|
results_blacklist_library,
|
||||||
@@ -48,6 +48,15 @@ def test_setup_black_white_lists():
|
|||||||
assert return_blacklist_users == ["user1", "user2"]
|
assert return_blacklist_users == ["user1", "user2"]
|
||||||
assert return_whitelist_users == ["user1", "user2"]
|
assert return_whitelist_users == ["user1", "user2"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_library_mapping_black_white_list():
|
||||||
|
blacklist_library = ["library1", "library2"]
|
||||||
|
whitelist_library = ["library1", "library2"]
|
||||||
|
blacklist_library_type = ["library_type1", "library_type2"]
|
||||||
|
whitelist_library_type = ["library_type1", "library_type2"]
|
||||||
|
blacklist_users = ["user1", "user2"]
|
||||||
|
whitelist_users = ["user1", "user2"]
|
||||||
|
|
||||||
# Library Mapping and user mapping
|
# Library Mapping and user mapping
|
||||||
library_mapping = {"library1": "library3"}
|
library_mapping = {"library1": "library3"}
|
||||||
user_mapping = {"user1": "user3"}
|
user_mapping = {"user1": "user3"}
|
||||||
|
|||||||
@@ -21,10 +21,6 @@ from src.library import (
|
|||||||
check_skip_logic,
|
check_skip_logic,
|
||||||
check_blacklist_logic,
|
check_blacklist_logic,
|
||||||
check_whitelist_logic,
|
check_whitelist_logic,
|
||||||
show_title_dict,
|
|
||||||
episode_title_dict,
|
|
||||||
movies_title_dict,
|
|
||||||
generate_library_guids_dict,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
blacklist_library = ["TV Shows"]
|
blacklist_library = ["TV Shows"]
|
||||||
@@ -42,21 +38,19 @@ show_list = {
|
|||||||
("tvdb", "392256"),
|
("tvdb", "392256"),
|
||||||
("title", "The Last of Us"),
|
("title", "The Last of Us"),
|
||||||
}
|
}
|
||||||
): {
|
): [
|
||||||
"Season 1": [
|
{
|
||||||
{
|
"imdb": "tt11957006",
|
||||||
"imdb": "tt11957006",
|
"tmdb": "2181581",
|
||||||
"tmdb": "2181581",
|
"tvdb": "8444132",
|
||||||
"tvdb": "8444132",
|
"locations": (
|
||||||
"locations": (
|
(
|
||||||
(
|
"The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",
|
||||||
"The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",
|
)
|
||||||
)
|
),
|
||||||
),
|
"status": {"completed": True, "time": 0},
|
||||||
"status": {"completed": True, "time": 0},
|
}
|
||||||
}
|
]
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
movie_list = [
|
movie_list = [
|
||||||
{
|
{
|
||||||
@@ -83,7 +77,6 @@ episode_titles = {
|
|||||||
"tvdb": ["8444132"],
|
"tvdb": ["8444132"],
|
||||||
"completed": [True],
|
"completed": [True],
|
||||||
"time": [0],
|
"time": [0],
|
||||||
"season": ["Season 1"],
|
|
||||||
"show": [
|
"show": [
|
||||||
{
|
{
|
||||||
"imdb": "tt3581920",
|
"imdb": "tt3581920",
|
||||||
@@ -283,45 +276,3 @@ def test_check_whitelist_logic():
|
|||||||
)
|
)
|
||||||
|
|
||||||
assert skip_reason is None
|
assert skip_reason is None
|
||||||
|
|
||||||
|
|
||||||
def test_show_title_dict():
|
|
||||||
show_titles_dict = show_title_dict(show_list)
|
|
||||||
|
|
||||||
assert show_titles_dict == show_titles
|
|
||||||
|
|
||||||
|
|
||||||
def test_episode_title_dict():
|
|
||||||
episode_titles_dict = episode_title_dict(show_list)
|
|
||||||
|
|
||||||
assert episode_titles_dict == episode_titles
|
|
||||||
|
|
||||||
|
|
||||||
def test_movies_title_dict():
|
|
||||||
movies_titles_dict = movies_title_dict(movie_list)
|
|
||||||
|
|
||||||
assert movies_titles_dict == movie_titles
|
|
||||||
|
|
||||||
|
|
||||||
def test_generate_library_guids_dict():
|
|
||||||
# Test with shows
|
|
||||||
(
|
|
||||||
show_titles_dict,
|
|
||||||
episode_titles_dict,
|
|
||||||
movies_titles_dict,
|
|
||||||
) = generate_library_guids_dict(show_list)
|
|
||||||
|
|
||||||
assert show_titles_dict == show_titles
|
|
||||||
assert episode_titles_dict == episode_titles
|
|
||||||
assert movies_titles_dict == {}
|
|
||||||
|
|
||||||
# Test with movies
|
|
||||||
(
|
|
||||||
show_titles_dict,
|
|
||||||
episode_titles_dict,
|
|
||||||
movies_titles_dict,
|
|
||||||
) = generate_library_guids_dict(movie_list)
|
|
||||||
|
|
||||||
assert show_titles_dict == {}
|
|
||||||
assert episode_titles_dict == {}
|
|
||||||
assert movies_titles_dict == movie_titles
|
|
||||||
|
|||||||
@@ -1,78 +0,0 @@
|
|||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
# getting the name of the directory
|
|
||||||
# where the this file is present.
|
|
||||||
current = os.path.dirname(os.path.realpath(__file__))
|
|
||||||
|
|
||||||
# Getting the parent directory name
|
|
||||||
# where the current directory is present.
|
|
||||||
parent = os.path.dirname(current)
|
|
||||||
|
|
||||||
# adding the parent directory to
|
|
||||||
# the sys.path.
|
|
||||||
sys.path.append(parent)
|
|
||||||
|
|
||||||
from src.black_white import setup_black_white_lists
|
|
||||||
|
|
||||||
|
|
||||||
def test_setup_black_white_lists():
|
|
||||||
# Simple
|
|
||||||
blacklist_library = "library1, library2"
|
|
||||||
whitelist_library = "library1, library2"
|
|
||||||
blacklist_library_type = "library_type1, library_type2"
|
|
||||||
whitelist_library_type = "library_type1, library_type2"
|
|
||||||
blacklist_users = "user1, user2"
|
|
||||||
whitelist_users = "user1, user2"
|
|
||||||
|
|
||||||
(
|
|
||||||
results_blacklist_library,
|
|
||||||
return_whitelist_library,
|
|
||||||
return_blacklist_library_type,
|
|
||||||
return_whitelist_library_type,
|
|
||||||
return_blacklist_users,
|
|
||||||
return_whitelist_users,
|
|
||||||
) = setup_black_white_lists(
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
blacklist_users,
|
|
||||||
whitelist_users,
|
|
||||||
)
|
|
||||||
|
|
||||||
assert results_blacklist_library == ["library1", "library2"]
|
|
||||||
assert return_whitelist_library == ["library1", "library2"]
|
|
||||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
|
||||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
|
||||||
assert return_blacklist_users == ["user1", "user2"]
|
|
||||||
assert return_whitelist_users == ["user1", "user2"]
|
|
||||||
|
|
||||||
# Library Mapping and user mapping
|
|
||||||
library_mapping = {"library1": "library3"}
|
|
||||||
user_mapping = {"user1": "user3"}
|
|
||||||
|
|
||||||
(
|
|
||||||
results_blacklist_library,
|
|
||||||
return_whitelist_library,
|
|
||||||
return_blacklist_library_type,
|
|
||||||
return_whitelist_library_type,
|
|
||||||
return_blacklist_users,
|
|
||||||
return_whitelist_users,
|
|
||||||
) = setup_black_white_lists(
|
|
||||||
blacklist_library,
|
|
||||||
whitelist_library,
|
|
||||||
blacklist_library_type,
|
|
||||||
whitelist_library_type,
|
|
||||||
blacklist_users,
|
|
||||||
whitelist_users,
|
|
||||||
library_mapping,
|
|
||||||
user_mapping,
|
|
||||||
)
|
|
||||||
|
|
||||||
assert results_blacklist_library == ["library1", "library2", "library3"]
|
|
||||||
assert return_whitelist_library == ["library1", "library2", "library3"]
|
|
||||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
|
||||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
|
||||||
assert return_blacklist_users == ["user1", "user2", "user3"]
|
|
||||||
assert return_whitelist_users == ["user1", "user2", "user3"]
|
|
||||||
1323
test/test_watched.py
1323
test/test_watched.py
File diff suppressed because it is too large
Load Diff
@@ -1,75 +1,216 @@
|
|||||||
# Check the mark.log file that is generated by the CI to make sure it contains the expected values
|
import argparse
|
||||||
|
|
||||||
import os
|
import os
|
||||||
|
import sys
|
||||||
|
from loguru import logger
|
||||||
|
from collections import Counter
|
||||||
|
|
||||||
|
|
||||||
|
class MarkLogError(Exception):
|
||||||
|
"""Custom exception for mark.log validation failures."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Check the mark.log file that is generated by the CI to make sure it contains the expected values"
|
||||||
|
)
|
||||||
|
group = parser.add_mutually_exclusive_group(required=True)
|
||||||
|
group.add_argument(
|
||||||
|
"--guids", action="store_true", help="Check the mark.log file for guids"
|
||||||
|
)
|
||||||
|
group.add_argument(
|
||||||
|
"--locations", action="store_true", help="Check the mark.log file for locations"
|
||||||
|
)
|
||||||
|
group.add_argument(
|
||||||
|
"--write", action="store_true", help="Check the mark.log file for write-run"
|
||||||
|
)
|
||||||
|
group.add_argument(
|
||||||
|
"--plex", action="store_true", help="Check the mark.log file for Plex"
|
||||||
|
)
|
||||||
|
group.add_argument(
|
||||||
|
"--jellyfin", action="store_true", help="Check the mark.log file for Jellyfin"
|
||||||
|
)
|
||||||
|
group.add_argument(
|
||||||
|
"--emby", action="store_true", help="Check the mark.log file for Emby"
|
||||||
|
)
|
||||||
|
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
def read_marklog():
|
def read_marklog():
|
||||||
marklog = os.path.join(os.getcwd(), "mark.log")
|
marklog = os.path.join(os.getcwd(), "mark.log")
|
||||||
with open(marklog, "r") as f:
|
try:
|
||||||
lines = f.readlines()
|
with open(marklog, "r") as f:
|
||||||
return lines
|
lines = [line.strip() for line in f if line.strip()]
|
||||||
|
return lines
|
||||||
|
except Exception as e:
|
||||||
|
raise MarkLogError(f"Error reading {marklog}: {e}")
|
||||||
|
|
||||||
|
|
||||||
def check_marklog(lines, expected_values):
|
def check_marklog(lines, expected_values):
|
||||||
try:
|
found_counter = Counter(lines)
|
||||||
# Check to make sure the marklog contains all the expected values and nothing else
|
expected_counter = Counter(expected_values)
|
||||||
found_values = []
|
|
||||||
for line in lines:
|
|
||||||
# Remove the newline character
|
|
||||||
line = line.strip()
|
|
||||||
if line not in expected_values:
|
|
||||||
raise Exception("Line not found in marklog: " + line)
|
|
||||||
|
|
||||||
found_values.append(line)
|
# Determine missing and extra items by comparing counts
|
||||||
|
missing = expected_counter - found_counter
|
||||||
|
extra = found_counter - expected_counter
|
||||||
|
|
||||||
# Check to make sure the marklog contains the same number of values as the expected values
|
if missing or extra:
|
||||||
if len(found_values) != len(expected_values):
|
if missing:
|
||||||
raise Exception(
|
logger.error("Missing expected entries (with counts):")
|
||||||
"Marklog did not contain the same number of values as the expected values, found "
|
for entry, count in missing.items():
|
||||||
+ str(len(found_values))
|
logger.error(f" {entry}: missing {count} time(s)")
|
||||||
+ " values, expected "
|
if extra:
|
||||||
+ str(len(expected_values))
|
logger.error("Unexpected extra entries found (with counts):")
|
||||||
+ " values"
|
for entry, count in extra.items():
|
||||||
)
|
logger.error(f" {entry}: found {count} extra time(s)")
|
||||||
|
|
||||||
# Check that the two lists contain the same values
|
logger.error(
|
||||||
if sorted(found_values) != sorted(expected_values):
|
f"Entry count mismatch: found {len(lines)} entries, expected {len(expected_values)} entries."
|
||||||
raise Exception(
|
)
|
||||||
"Marklog did not contain the same values as the expected values, found:\n"
|
logger.error("Full mark.log content:")
|
||||||
+ "\n".join(sorted(found_values))
|
for line in sorted(lines):
|
||||||
+ "\n\nExpected:\n"
|
logger.error(f" {line}")
|
||||||
+ "\n".join(sorted(expected_values))
|
raise MarkLogError("mark.log validation failed.")
|
||||||
)
|
|
||||||
|
|
||||||
return True
|
return True
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
expected_values = [
|
args = parse_args()
|
||||||
"jellyplex_watched/Movies/Five Nights at Freddy's",
|
|
||||||
"jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
|
# Expected values defined for each check
|
||||||
"jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
|
expected_jellyfin = [
|
||||||
"jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Two (2021)",
|
||||||
"jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 2",
|
||||||
"jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
|
"Plex/JellyPlex-CI/jellyplex_watched/Movies/Five Nights at Freddy's",
|
||||||
"JellyUser/Movies/Big Buck Bunny",
|
"Plex/JellyPlex-CI/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
|
||||||
"JellyUser/Shows/Doctor Who/The Unquiet Dead",
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
|
||||||
"JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie Two",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E02",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/Five Nights at Freddy's",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/5",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/5",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/5",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
|
||||||
|
]
|
||||||
|
expected_emby = [
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Three (2022)",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 3",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Movies/Tears of Steel",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Parallels and Interiors/240429",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie Three (2022)",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E03",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Movies/Tears of Steel",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
|
||||||
|
]
|
||||||
|
expected_plex = [
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Movies/Big Buck Bunny",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Movies/Killers of the Flower Moon/4",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/The Unquiet Dead",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/Aliens of London (1)/4",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie One (2020)",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/Big Buck Bunny",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/Killers of the Flower Moon/4",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The Unquiet Dead",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Aliens of London (1)/4",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Secrets and Lies",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie One",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Triple the expected values because the CI runs three times
|
expected_locations = expected_emby + expected_plex + expected_jellyfin
|
||||||
expected_values = expected_values * 3
|
# Remove Custom Movies/TV Shows as they should not have guids
|
||||||
|
expected_guids = [item for item in expected_locations if "Custom" not in item]
|
||||||
|
|
||||||
lines = read_marklog()
|
expected_write = [
|
||||||
if not check_marklog(lines, expected_values):
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Two (2021)",
|
||||||
print("Failed to validate marklog")
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 2",
|
||||||
exit(1)
|
"Plex/JellyPlex-CI/jellyplex_watched/Movies/Five Nights at Freddy's",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Movies/Big Buck Bunny",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Movies/Killers of the Flower Moon/4",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/The Unquiet Dead",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Doctor Who/Aliens of London (1)/4",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie One (2020)",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom Movies/Movie Three (2022)",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Custom TV Shows/Greatest Show Ever 3000/Episode 3",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/Movies/Tears of Steel",
|
||||||
|
"Plex/JellyPlex-CI/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Parallels and Interiors/240429",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/Big Buck Bunny",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/The Family Plan",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/Five Nights at Freddy's",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/5",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Movies/Killers of the Flower Moon/4",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E01",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom TV Shows/Greatest Show Ever (3000)/S01E02",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/5",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/The Unquiet Dead",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Doctor Who (2005)/Aliens of London (1)/4",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/5",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Secrets and Lies",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/The Way Out",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie One",
|
||||||
|
"Emby/Emby-Server/jellyplex_watched/Custom Movies/Movie Two",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom Movies/Movie Three (2022)",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Custom TV Shows/Greatest Show Ever (3000)/S01E03",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Movies/Tears of Steel",
|
||||||
|
"Jellyfin/Jellyfin-Server/JellyUser/Shows/Monarch: Legacy of Monsters/Parallels and Interiors/4",
|
||||||
|
]
|
||||||
|
|
||||||
print("Successfully validated marklog")
|
# Determine which expected values to use based on the command-line flag
|
||||||
exit(0)
|
if args.guids:
|
||||||
|
expected_values = expected_guids
|
||||||
|
check_type = "GUIDs"
|
||||||
|
elif args.locations:
|
||||||
|
expected_values = expected_locations
|
||||||
|
check_type = "locations"
|
||||||
|
elif args.write:
|
||||||
|
expected_values = expected_write
|
||||||
|
check_type = "write-run"
|
||||||
|
elif args.plex:
|
||||||
|
expected_values = expected_plex
|
||||||
|
check_type = "Plex"
|
||||||
|
elif args.jellyfin:
|
||||||
|
expected_values = expected_jellyfin
|
||||||
|
check_type = "Jellyfin"
|
||||||
|
elif args.emby:
|
||||||
|
expected_values = expected_emby
|
||||||
|
check_type = "Emby"
|
||||||
|
else:
|
||||||
|
raise MarkLogError("No server specified")
|
||||||
|
|
||||||
|
logger.info(f"Validating mark.log for {check_type}...")
|
||||||
|
|
||||||
|
try:
|
||||||
|
lines = read_marklog()
|
||||||
|
check_marklog(lines, expected_values)
|
||||||
|
except MarkLogError as e:
|
||||||
|
logger.error(e)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
logger.success("Successfully validated mark.log")
|
||||||
|
sys.exit(0)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
|||||||
305
uv.lock
generated
Normal file
305
uv.lock
generated
Normal file
@@ -0,0 +1,305 @@
|
|||||||
|
version = 1
|
||||||
|
revision = 1
|
||||||
|
requires-python = ">=3.12"
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "annotated-types"
|
||||||
|
version = "0.7.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "certifi"
|
||||||
|
version = "2025.1.31"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "charset-normalizer"
|
||||||
|
version = "3.4.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "colorama"
|
||||||
|
version = "0.4.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "idna"
|
||||||
|
version = "3.10"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "iniconfig"
|
||||||
|
version = "2.0.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jellyplex-watched"
|
||||||
|
version = "6.1.2"
|
||||||
|
source = { virtual = "." }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "loguru" },
|
||||||
|
{ name = "packaging" },
|
||||||
|
{ name = "plexapi" },
|
||||||
|
{ name = "pydantic" },
|
||||||
|
{ name = "python-dotenv" },
|
||||||
|
{ name = "requests" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dev-dependencies]
|
||||||
|
dev = [
|
||||||
|
{ name = "pytest" },
|
||||||
|
]
|
||||||
|
lint = [
|
||||||
|
{ name = "ruff" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata]
|
||||||
|
requires-dist = [
|
||||||
|
{ name = "loguru", specifier = ">=0.7.3" },
|
||||||
|
{ name = "packaging", specifier = "==24.2" },
|
||||||
|
{ name = "plexapi", specifier = "==4.16.1" },
|
||||||
|
{ name = "pydantic", specifier = "==2.10.6" },
|
||||||
|
{ name = "python-dotenv", specifier = "==1.0.0" },
|
||||||
|
{ name = "requests", specifier = "==2.32.3" },
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.metadata.requires-dev]
|
||||||
|
dev = [{ name = "pytest", specifier = ">=8.3.4" }]
|
||||||
|
lint = [{ name = "ruff", specifier = ">=0.9.6" }]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "loguru"
|
||||||
|
version = "0.7.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
{ name = "win32-setctime", marker = "sys_platform == 'win32'" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "packaging"
|
||||||
|
version = "24.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "plexapi"
|
||||||
|
version = "4.16.1"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "requests" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/66/1c/beeaf8dd489dad13ca4310a7bd9c601da6c6831e7c8ac61a45aadccb742d/plexapi-4.16.1.tar.gz", hash = "sha256:8e62d727e67b69994770196cd83a57783e9194d735aa347f682b4534ce6f0565", size = 153460 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ba/07/20b8a50b8b78374a49685dfe12739c27a9cd440b13913c2cbeeb50470d1e/PlexAPI-4.16.1-py3-none-any.whl", hash = "sha256:87432226c4cd682b5780b01e8def313285c52bdd57c8e72f66a5cef73ce64530", size = 165325 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pluggy"
|
||||||
|
version = "1.5.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic"
|
||||||
|
version = "2.10.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "annotated-types" },
|
||||||
|
{ name = "pydantic-core" },
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pydantic-core"
|
||||||
|
version = "2.27.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "typing-extensions" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "pytest"
|
||||||
|
version = "8.3.4"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "colorama", marker = "sys_platform == 'win32'" },
|
||||||
|
{ name = "iniconfig" },
|
||||||
|
{ name = "packaging" },
|
||||||
|
{ name = "pluggy" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "python-dotenv"
|
||||||
|
version = "1.0.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/31/06/1ef763af20d0572c032fa22882cfbfb005fba6e7300715a37840858c919e/python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba", size = 37399 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/44/2f/62ea1c8b593f4e093cc1a7768f0d46112107e790c3e478532329e434f00b/python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a", size = 19482 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "requests"
|
||||||
|
version = "2.32.3"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
dependencies = [
|
||||||
|
{ name = "certifi" },
|
||||||
|
{ name = "charset-normalizer" },
|
||||||
|
{ name = "idna" },
|
||||||
|
{ name = "urllib3" },
|
||||||
|
]
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "ruff"
|
||||||
|
version = "0.9.6"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/2a/e1/e265aba384343dd8ddd3083f5e33536cd17e1566c41453a5517b5dd443be/ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9", size = 3639454 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/76/e3/3d2c022e687e18cf5d93d6bfa2722d46afc64eaa438c7fbbdd603b3597be/ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba", size = 11714128 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/22/aff073b70f95c052e5c58153cba735748c9e70107a77d03420d7850710a0/ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504", size = 11682539 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/75/a7/f5b7390afd98a7918582a3d256cd3e78ba0a26165a467c1820084587cbf9/ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83", size = 11132512 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a6/e3/45de13ef65047fea2e33f7e573d848206e15c715e5cd56095589a7733d04/ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc", size = 11929275 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/7d/f2/23d04cd6c43b2e641ab961ade8d0b5edb212ecebd112506188c91f2a6e6c/ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b", size = 11466502 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/b5/6f/3a8cf166f2d7f1627dd2201e6cbc4cb81f8b7d58099348f0c1ff7b733792/ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e", size = 12676364 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f5/c4/db52e2189983c70114ff2b7e3997e48c8318af44fe83e1ce9517570a50c6/ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666", size = 13335518 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/66/44/545f8a4d136830f08f4d24324e7db957c5374bf3a3f7a6c0bc7be4623a37/ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5", size = 12823287 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c5/26/8208ef9ee7431032c143649a9967c3ae1aae4257d95e6f8519f07309aa66/ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5", size = 14592374 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/31/70/e917781e55ff39c5b5208bda384fd397ffd76605e68544d71a7e40944945/ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217", size = 12500173 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/84/f5/e4ddee07660f5a9622a9c2b639afd8f3104988dc4f6ba0b73ffacffa9a8c/ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6", size = 11906555 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/f1/2b/6ff2fe383667075eef8656b9892e73dd9b119b5e3add51298628b87f6429/ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897", size = 11538958 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/3c/db/98e59e90de45d1eb46649151c10a062d5707b5b7f76f64eb1e29edf6ebb1/ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08", size = 12117247 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ec/bc/54e38f6d219013a9204a5a2015c09e7a8c36cedcd50a4b01ac69a550b9d9/ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656", size = 12554647 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/a5/7d/7b461ab0e2404293c0627125bb70ac642c2e8d55bf590f6fce85f508f1b2/ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d", size = 9949214 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/ee/30/c3cee10f915ed75a5c29c1e57311282d1a15855551a64795c1b2bbe5cf37/ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa", size = 10999914 },
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e8/a8/d71f44b93e3aa86ae232af1f2126ca7b95c0f515ec135462b3e1f351441c/ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a", size = 10177499 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "typing-extensions"
|
||||||
|
version = "4.12.2"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "urllib3"
|
||||||
|
version = "2.3.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 },
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "win32-setctime"
|
||||||
|
version = "1.2.0"
|
||||||
|
source = { registry = "https://pypi.org/simple" }
|
||||||
|
sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867 }
|
||||||
|
wheels = [
|
||||||
|
{ url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083 },
|
||||||
|
]
|
||||||
Reference in New Issue
Block a user