Compare commits
98 Commits
Commit SHAs in this comparison:

fd64088bde, 7832e41a3b, cadd65d69b, 9f004797fc, 9041fee7ad, 9af6c9057c, 757ce91138, 98f96ed5c7, 3e15120e2a, 5824e6c0cc,
7087d75efb, b2a06b8fd3, 1ee055faf5, 404089dfca, ed24948dee, 1f16fcb8eb, 03de3affd7, 2bad887659, 796be47a63, dc1fe11590,
13b4ff3215, dca54cf4fb, a4365e59f3, b960bccb86, 218037200c, 4ac670e837, 96eff65c3e, 45471607c8, 14885744b1, d1fd61f1d1,
6c1ee4a7dc, 9a8e799e68, ffec4e2f28, 00102891a5, aa76b83428, a644189ea5, c5d987a8c9, bdd68ad68d, 2d86bca781, 1b01ff6ec2,
f08ec43507, 7f9424260a, 5f21943353, a5a795f43c, fcb6d7625f, fd2179998f, 654e7f20e1, 1eb92cf7c1, 111e284cc8, 1a4e3f4ec4,
4066228e57, 59c6d278e3, 39b33f3d43, e8faf52b2b, 370e9bac63, d0746cec5a, 251937431b, 50faf061af, 9ffbc49ad3, 644dc8e3af,
47bc4e94dc, f17d39fe17, 966dcacf8d, 9afc00443c, 3ec177ea64, b360c9fd0b, 1ed791b1ed, f19b1a3063, 190a72bd3c, c848106ce7,
dd319271bd, 16879cc728, 942ec3533f, 9f6edfc91a, 827ace2e97, f6b57a1b4d, 88a7526721, 1efb4d8543, 7571e9a343, 7640e9ee03,
50ed3d6400, c9a373851f, a3f3db8f4e, de619de923, 852d8dc3c3, c104973f95, 8b7fc5e323, afb71d8e00, 34d97f8dde, 2ad6b3afdf,
7cd492dc98, 74b5ea7b5e, 21fe4875eb, aeb86f6b85, 70ef31ff47, 0584a85f90, beb4e667ae, 7695994ec2
.env.sample (114 lines changed; @@ -1,40 +1,74 @@)

The sample configuration grows from 40 to 74 lines. Compared with the previous single-server sample (one PLEX_BASEURL/PLEX_TOKEN pair, one JELLYFIN_BASEURL/JELLYFIN_TOKEN pair, DEBUG defaulting to "True" and DEBUG_LEVEL to "INFO"), the new sample groups settings into Global, Plex and Jellyfin sections, accepts comma-separated lists for multiple servers, and adds the SSL_BYPASS and SYNC_FROM_* options. The new file:

```bash
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True"

## Additional logging information
DEBUG = "False"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info"

## How often to run the script in seconds
SLEEP_DURATION = "3600"

## Log file where all output will be written to
LOGFILE = "log.log"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
#USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }

## Map libraries between servers in the even that they are different, order does not matter
## Comma separated for multiple options
#LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }

## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = ""
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2"


# Plex

## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400, https://nas:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"

## If not using plex token then use username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "False"

## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"


# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096, http://nas:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
```
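Several settings now hold comma-separated lists, so a consumer has to split each value and pair the entries up per server. Below is a minimal, illustrative sketch of that parsing; the variable names come from the sample above, but the env_list helper itself is hypothetical and not part of the project:

```python
import os


def env_list(name, default=""):
    # Split a comma-separated env value such as "a, b" into ["a", "b"], dropping blanks.
    return [part.strip() for part in os.getenv(name, default).split(",") if part.strip()]


plex_urls = env_list("PLEX_BASEURL", "http://localhost:32400")
plex_tokens = env_list("PLEX_TOKEN")

# Pair each base URL with the token at the same position.
plex_servers = list(zip(plex_urls, plex_tokens))
```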
.github/ISSUE_TEMPLATE/bug_report.md (new file, 31 lines)

```markdown
---
name: Bug report
about: Create a report to help us improve
title: "[BUG]"
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error

**Expected behavior**
A clear and concise description of what you expected to happen.

**Logs**
If applicable, add logs to help explain your problem ideally with DEBUG set to true, be sure to remove sensitive information

**Type:**
- [ ] Docker
- [ ] Native

**Additional context**
Add any other context about the problem here.
```
.github/ISSUE_TEMPLATE/feature_request.md (new file, 20 lines)

```markdown
---
name: Feature request
about: Suggest an idea for this project
title: "[Feature Request]"
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
```
.github/workflows/ci.yml (160 lines changed; @@ -1,74 +1,86 @@)

Previously the workflow defined only the docker job, checked out with actions/checkout@v2 and used setup-qemu-action@v1, setup-buildx-action@v1, login-action@v1 and build-push-action@v2. The new workflow adds a pytest job, makes the docker job depend on it (needs: pytest), and bumps checkout to v3, the QEMU, Buildx and login actions to v2, and build-push to v3. The new workflow:

```yaml
name: CI

on:
  push:
    paths-ignore:
      - .gitignore
      - "*.md"
  pull_request:
    paths-ignore:
      - .gitignore
      - "*.md"

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: "Install dependencies"
        run: pip install -r requirements.txt && pip install -r test/requirements.txt

      - name: "Run tests"
        run: pytest -vvv

  docker:
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Docker meta
        id: docker_meta
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        if: "${{ env.DOCKER_USERNAME != '' }}"
        uses: docker/metadata-action@v4
        with:
          images: ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched # list of Docker images to use as base name for tags
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to DockerHub
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Build
        id: build
        if: "${{ steps.docker_meta.outcome == 'skipped' }}"
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: false
          tags: jellyplex-watched:action

      - name: Build Push
        id: build_push
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/build-push-action@v3
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

      # Echo digest so users can validate their image
      - name: Image digest
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        run: echo "${{ steps.build_push.outputs.digest }}"
```
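The new pytest job simply runs the suite with `pytest -vvv` after installing requirements.txt and test/requirements.txt. The contents of test/ are not part of this comparison; as an illustration of the kind of unit test that job would pick up, here is a hypothetical test against str_to_bool from src/functions.py:

```python
# test_functions.py -- hypothetical example; the repository's real tests are not shown here.
from src.functions import str_to_bool


def test_str_to_bool_truthy_values():
    assert str_to_bool("True") is True
    assert str_to_bool("yes") is True
    assert str_to_bool("1") is True


def test_str_to_bool_falsy_values():
    assert str_to_bool("False") is False
    assert str_to_bool("") is False
    assert str_to_bool(None) is False
```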
.github/workflows/codeql.yml (new file, 41 lines)

```yaml
name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    - cron: "23 20 * * 6"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ python ]

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          queries: +security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{ matrix.language }}"
```
.gitignore (@@ -1,131 +1,132 @@)

The only change is a new `*.prof` entry added directly after `.env`; the remaining roughly 125 lines of the stock Python .gitignore (byte-compiled files, packaging artifacts, test and coverage output, virtual environments, IDE and type-checker caches) are unchanged:

```diff
 .env
+*.prof

 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
 *$py.class
```
.vscode/launch.json (@@ -1,16 +1,16 @@)

The only change renames the launch configuration from "Python" to "Python: Main":

```jsonc
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Main",
            "type": "python",
            "request": "launch",
            "program": "main.py",
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}
```
.vscode/settings.json (new file, 7 lines)

```jsonc
{
    "[python]" : {
        "editor.formatOnSave": true,
    },
    "python.formatting.provider": "black",

}
```
Dockerfile (51 lines changed; @@ -1,10 +1,41 @@)

The previous Dockerfile only set the work directory, installed requirements and copied the source. The new one additionally declares an ENV default for every setting and installs build-essential before installing the Python requirements:

```dockerfile
FROM python:3-slim

ENV DRYRUN 'True'
ENV DEBUG 'True'
ENV DEBUG_LEVEL 'INFO'
ENV SLEEP_DURATION '3600'
ENV LOGFILE 'log.log'

ENV USER_MAPPING ''
ENV LIBRARY_MAPPING ''

ENV PLEX_BASEURL ''
ENV PLEX_TOKEN ''
ENV PLEX_USERNAME ''
ENV PLEX_PASSWORD ''
ENV PLEX_SERVERNAME ''

ENV JELLYFIN_BASEURL ''
ENV JELLYFIN_TOKEN ''

ENV BLACKLIST_LIBRARY ''
ENV WHITELIST_LIBRARY ''
ENV BLACKLIST_LIBRARY_TYPE ''
ENV WHITELIST_LIBRARY_TYPE ''
ENV BLACKLIST_USERS ''
ENV WHITELIST_USERS ''

WORKDIR /app

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    build-essential && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

COPY ./requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD ["python", "-u", "main.py"]
```
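The image now ships a default for every setting via ENV, while the code loads .env with load_dotenv(override=True) (see src/functions.py below), so values from a mounted .env file win over the container defaults. A small sketch of that precedence, assuming standard python-dotenv semantics:

```python
import os
from dotenv import load_dotenv

# Container default (what the Dockerfile's ENV lines provide).
os.environ.setdefault("DEBUG_LEVEL", "INFO")

# override=True lets a mounted /app/.env replace values already in the environment.
load_dotenv(override=True)

print(os.getenv("DEBUG_LEVEL", "info"))  # .env value if present, otherwise "INFO"
```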
README.md (218 lines changed; @@ -1,69 +1,149 @@)

Compared with the previous version, the tagline gains "locally", the Description now says matching uses file names and provider ids (it previously said "the imdb ids and any other matching id") and notes multi-server support, a Configuration section embedding the full settings reference was added, the Docker ".env" step no longer tells users to "set the MNEMONIC variable to your seed phrase", and the "recieving" typo in Contributing was fixed. The new README:

````markdown
# JellyPlex-Watched

[](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)

Sync watched between jellyfin and plex locally

## Description

Keep in sync all your users watched history between jellyfin and plex servers locally. This uses file names and provider ids to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by entering multiple options in the .env plex/jellyfin section separated by commas.

## Configuration

```bash
# Global Settings

## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True"

## Additional logging information
DEBUG = "False"

## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info"

## How often to run the script in seconds
SLEEP_DURATION = "3600"

## Log file where all output will be written to
LOGFILE = "log.log"

## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }

## Map libraries between servers in the even that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }

## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
## Comma separated for multiple options
BLACKLIST_LIBRARY = ""
WHITELIST_LIBRARY = ""
BLACKLIST_LIBRARY_TYPE = ""
WHITELIST_LIBRARY_TYPE = ""
BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2"


# Plex

## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
PLEX_BASEURL = "http://localhost:32400, https://nas:32400"

## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"

## If not using plex token then use username and password of the server admin along with the servername
## Comma separated for multiple options
#PLEX_USERNAME = "PlexUser, PlexUser2"
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"

## Skip hostname validation for ssl certificates.
## Set to True if running into ssl certificate errors
SSL_BYPASS = "False"

## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
SYNC_FROM_PLEX_TO_PLEX = "True"
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"


# Jellyfin

## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096, http://nas:8096"

## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
## Comma separated list for multiple servers
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
```

## Installation

### Baremetal

- Setup virtualenv of your choice

- Install dependencies

```bash
pip install -r requirements.txt
```

- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens

- Run

```bash
python main.py
```

### Docker

- Build docker image

```bash
docker build -t jellyplex-watched .
```

- or use pre-built image

```bash
docker pull luigi311/jellyplex-watched:latest
```

#### With variables

- Run

```bash
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
```

#### With .env

- Create a .env file similar to .env.sample and set the variables to match your setup

- Run

```bash
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
```

## Contributing

I am open to receiving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.

## License

This is currently under the GNU General Public License v3.0.
````
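USER_MAPPING and LIBRARY_MAPPING are written as JSON objects inside the env value. Following the behaviour of the previous main.py (summarized in the main.py section below), they were parsed with json.loads, lower-casing the user mapping; a short illustrative sketch:

```python
import json
import os

user_mapping = os.getenv("USER_MAPPING")
if user_mapping:
    # e.g. USER_MAPPING = { "testuser2": "testuser3" } -> {"testuser2": "testuser3"}
    user_mapping = json.loads(user_mapping.lower())

library_mapping = os.getenv("LIBRARY_MAPPING")
if library_mapping:
    library_mapping = json.loads(library_mapping)
```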
main.py (318 lines changed; @@ -1,307 +1,11 @@)

main.py shrinks from a 307-line script to an 11-line entry point; the logic that used to live here was removed and equivalents now live under src/ (setup_black_white_lists, for example, reappears in src/black_white.py below). In summary, the removed code consisted of:

- cleanup_watched(watched_list_1, watched_list_2, user_mapping, library_mapping): deep-copied list 1 and removed every movie and episode already present in list 2, matching users and libraries through the optional mappings and pruning empty seasons, shows, libraries and users from the result.
- setup_black_white_lists(library_mapping): read the BLACKLIST_*/WHITELIST_* environment variables, split them on commas, normalised case, and expanded library entries through the library mapping.
- setup_users(plex, jellyfin, blacklist_users, whitelist_users, user_mapping): built the set of users that exist on both servers, applied the white/blacklists, and returned the matching Plex user objects and Jellyfin user ids, raising if either side ended up empty.
- main(): deleted the old logfile, read DRYRUN, USER_MAPPING and LIBRARY_MAPPING, constructed Plex() and Jellyfin(), fetched both watched lists, cleaned each against the other and called update_watched in both directions.
- The __main__ block looped forever, calling main() every SLEEP_TIMER seconds (default 3600) and logging any exception and traceback before retrying, exiting cleanly on KeyboardInterrupt.

The new file only guards the Python version and delegates to src.main:

```python
import sys

if __name__ == "__main__":
    # Check python version 3.6 or higher
    if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
        print("This script requires Python 3.6 or higher")
        sys.exit(1)

    from src.main import main

    main()
```
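For reference, a condensed sketch of the retry loop the previous __main__ block implemented (names follow the old code summarized above; main() and logger() are assumed to be in scope as they were in the old script):

```python
import os
import traceback
from time import sleep

sleep_timer = float(os.getenv("SLEEP_TIMER", "3600"))

while True:
    try:
        main()  # one full sync pass between all configured servers
        logger(f"Looping in {sleep_timer}")
    except KeyboardInterrupt:
        logger("Exiting", log_type=0)
        os._exit(0)
    except Exception:
        # Log the traceback and retry on the next cycle instead of crashing.
        logger(traceback.format_exc(), 2)
        logger(f"Retrying in {sleep_timer}", log_type=0)

    sleep(sleep_timer)
```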
Requirements (@@ -1,3 +1,4 @@): aiohttp is added alongside the existing plexapi, requests and python-dotenv dependencies.

```diff
@@ -1,3 +1,4 @@
 plexapi
 requests
 python-dotenv
+aiohttp
```
src/black_white.py (new file, 139 lines)

```python
from src.functions import logger, search_mapping


def setup_black_white_lists(
    blacklist_library: str,
    whitelist_library: str,
    blacklist_library_type: str,
    whitelist_library_type: str,
    blacklist_users: str,
    whitelist_users: str,
    library_mapping=None,
    user_mapping=None,
):
    blacklist_library, blacklist_library_type, blacklist_users = setup_black_lists(
        blacklist_library,
        blacklist_library_type,
        blacklist_users,
        library_mapping,
        user_mapping,
    )

    whitelist_library, whitelist_library_type, whitelist_users = setup_white_lists(
        whitelist_library,
        whitelist_library_type,
        whitelist_users,
        library_mapping,
        user_mapping,
    )

    return (
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
    )


def setup_black_lists(
    blacklist_library,
    blacklist_library_type,
    blacklist_users,
    library_mapping=None,
    user_mapping=None,
):
    if blacklist_library:
        if len(blacklist_library) > 0:
            blacklist_library = blacklist_library.split(",")
            blacklist_library = [x.strip() for x in blacklist_library]
            if library_mapping:
                temp_library = []
                for library in blacklist_library:
                    library_other = search_mapping(library_mapping, library)
                    if library_other:
                        temp_library.append(library_other)

                blacklist_library = blacklist_library + temp_library
    else:
        blacklist_library = []
    logger(f"Blacklist Library: {blacklist_library}", 1)

    if blacklist_library_type:
        if len(blacklist_library_type) > 0:
            blacklist_library_type = blacklist_library_type.split(",")
            blacklist_library_type = [x.lower().strip() for x in blacklist_library_type]
    else:
        blacklist_library_type = []
    logger(f"Blacklist Library Type: {blacklist_library_type}", 1)

    if blacklist_users:
        if len(blacklist_users) > 0:
            blacklist_users = blacklist_users.split(",")
            blacklist_users = [x.lower().strip() for x in blacklist_users]
            if user_mapping:
                temp_users = []
                for user in blacklist_users:
                    user_other = search_mapping(user_mapping, user)
                    if user_other:
                        temp_users.append(user_other)

                blacklist_users = blacklist_users + temp_users
    else:
        blacklist_users = []
    logger(f"Blacklist Users: {blacklist_users}", 1)

    return blacklist_library, blacklist_library_type, blacklist_users


def setup_white_lists(
    whitelist_library,
    whitelist_library_type,
    whitelist_users,
    library_mapping=None,
    user_mapping=None,
):
    if whitelist_library:
        if len(whitelist_library) > 0:
            whitelist_library = whitelist_library.split(",")
            whitelist_library = [x.strip() for x in whitelist_library]
            if library_mapping:
                temp_library = []
                for library in whitelist_library:
                    library_other = search_mapping(library_mapping, library)
                    if library_other:
                        temp_library.append(library_other)

                whitelist_library = whitelist_library + temp_library
    else:
        whitelist_library = []
    logger(f"Whitelist Library: {whitelist_library}", 1)

    if whitelist_library_type:
        if len(whitelist_library_type) > 0:
            whitelist_library_type = whitelist_library_type.split(",")
            whitelist_library_type = [x.lower().strip() for x in whitelist_library_type]
    else:
        whitelist_library_type = []
    logger(f"Whitelist Library Type: {whitelist_library_type}", 1)

    if whitelist_users:
        if len(whitelist_users) > 0:
            whitelist_users = whitelist_users.split(",")
            whitelist_users = [x.lower().strip() for x in whitelist_users]
            if user_mapping:
                temp_users = []
                for user in whitelist_users:
                    user_other = search_mapping(user_mapping, user)
                    if user_other:
                        temp_users.append(user_other)

                whitelist_users = whitelist_users + temp_users
        else:
            whitelist_users = []
    else:
        whitelist_users = []
    logger(f"Whitelist Users: {whitelist_users}", 1)

    return whitelist_library, whitelist_library_type, whitelist_users
```
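A small usage sketch for the module above, feeding it env-style comma-separated strings; the values are made up for illustration:

```python
from src.black_white import setup_black_white_lists

(
    blacklist_library,
    whitelist_library,
    blacklist_library_type,
    whitelist_library_type,
    blacklist_users,
    whitelist_users,
) = setup_black_white_lists(
    blacklist_library="",
    whitelist_library="Anime, Shows",
    blacklist_library_type="",
    whitelist_library_type="",
    blacklist_users="",
    whitelist_users="testuser1,testuser2",
    library_mapping={"Shows": "TV Shows"},
)

# whitelist_library -> ["Anime", "Shows", "TV Shows"]  (the mapping is expanded)
# whitelist_users   -> ["testuser1", "testuser2"]
```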
193
src/functions.py
193
src/functions.py
@@ -1,116 +1,77 @@
|
|||||||
import os
|
import os
|
||||||
from dotenv import load_dotenv
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
load_dotenv(override=True)
|
from dotenv import load_dotenv
|
||||||
|
|
||||||
logfile = os.getenv("LOGFILE","log.log")
|
load_dotenv(override=True)
|
||||||
|
|
||||||
def logger(message, log_type=0):
|
logfile = os.getenv("LOGFILE", "log.log")
|
||||||
debug = str_to_bool(os.getenv("DEBUG", "True"))
|
|
||||||
debug_level = os.getenv("DEBUG_LEVEL", "INFO")
|
|
||||||
|
def logger(message: str, log_type=0):
|
||||||
output = str(message)
|
debug = str_to_bool(os.getenv("DEBUG", "False"))
|
||||||
if log_type == 0:
|
debug_level = os.getenv("DEBUG_LEVEL", "info").lower()
|
||||||
pass
|
|
||||||
elif log_type == 1 and (debug or debug_level == "INFO"):
|
output = str(message)
|
||||||
output = f"[INFO]: {output}"
|
if log_type == 0:
|
||||||
elif log_type == 2:
|
pass
|
||||||
output = f"[ERROR]: {output}"
|
elif log_type == 1 and (debug and debug_level in ("info", "debug")):
|
||||||
elif log_type == 3 and (debug and debug_level == "DEBUG"):
|
output = f"[INFO]: {output}"
|
||||||
output = f"[DEBUG]: {output}"
|
elif log_type == 2:
|
||||||
else:
|
output = f"[ERROR]: {output}"
|
||||||
output = None
|
elif log_type == 3 and (debug and debug_level == "debug"):
|
||||||
|
output = f"[DEBUG]: {output}"
|
||||||
if output is not None:
|
elif log_type == 4:
|
||||||
print(output)
|
output = f"[WARNING]: {output}"
|
||||||
file = open(logfile, "a", encoding="utf-8")
|
else:
|
||||||
file.write(output + "\n")
|
output = None
|
||||||
|
|
||||||
# Reimplementation of distutils.util.strtobool due to it being deprecated
|
src/functions.py (tail of the file)

Unchanged tail of logger():

if output is not None:
    print(output)
    file = open(logfile, "a", encoding="utf-8")
    file.write(output + "\n")

Removed from src/functions.py (the skip logic and GUID helpers move to the new src/library.py below):

def check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping):
    skip_reason = None

    if library_type.lower() in blacklist_library_type:
        skip_reason = "is blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        skip_reason = "is blacklist_library"

    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)
    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            skip_reason = "is blacklist_library"

    if len(whitelist_library_type) > 0:
        if library_type.lower() not in whitelist_library_type:
            skip_reason = "is not whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_title.lower() not in [x.lower() for x in whitelist_library]:
            skip_reason = "is not whitelist_library"

        if library_other:
            if library_other.lower() not in [x.lower() for x in whitelist_library]:
                skip_reason = "is not whitelist_library"

    return skip_reason


def generate_library_guids_dict(user_list: dict, generate_output: int):
    # if generate_output is 0 then only generate shows, if 1 then only generate episodes,
    # if 2 then generate movies, if 3 then generate shows and episodes
    show_output_dict = {}
    episode_output_dict = {}
    movies_output_dict = {}

    if generate_output in (0, 3):
        show_output_keys = user_list.keys()
        show_output_keys = [dict(x) for x in list(show_output_keys)]
        for show_key in show_output_keys:
            for provider_key, provider_value in show_key.items():
                # Skip title
                if provider_key.lower() == "title":
                    continue
                if provider_key.lower() not in show_output_dict:
                    show_output_dict[provider_key.lower()] = []
                show_output_dict[provider_key.lower()].append(provider_value.lower())

    if generate_output in (1, 3):
        for show in user_list:
            for season in user_list[show]:
                for episode in user_list[show][season]:
                    for episode_key, episode_value in episode.items():
                        if episode_key.lower() not in episode_output_dict:
                            episode_output_dict[episode_key.lower()] = []
                        episode_output_dict[episode_key.lower()].append(episode_value.lower())

    if generate_output == 2:
        for movie in user_list:
            for movie_key, movie_value in movie.items():
                if movie_key.lower() not in movies_output_dict:
                    movies_output_dict[movie_key.lower()] = []
                movies_output_dict[movie_key.lower()].append(movie_value.lower())

    return show_output_dict, episode_output_dict, movies_output_dict

Kept and updated in src/functions.py (search_mapping now indexes with key_value.lower() when it matched a lowercased key or value, and future_thread_executor is new):

# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: any) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")


# Get mapped value
def search_mapping(dictionary: dict, key_value: str):
    if key_value in dictionary.keys():
        return dictionary[key_value]
    elif key_value.lower() in dictionary.keys():
        return dictionary[key_value.lower()]
    elif key_value in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value)]
    elif key_value.lower() in dictionary.values():
        return list(dictionary.keys())[
            list(dictionary.values()).index(key_value.lower())
        ]
    else:
        return None


def future_thread_executor(args: list, workers: int = -1):
    futures_list = []
    results = []

    if workers == -1:
        workers = min(32, os.cpu_count() * 2)

    with ThreadPoolExecutor(max_workers=workers) as executor:
        for arg in args:
            # * arg unpacks the list into actual arguments
            futures_list.append(executor.submit(*arg))

        for future in futures_list:
            try:
                result = future.result()
                results.append(result)
            except Exception as e:
                raise Exception(e)

    return results
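As a quick, hedged illustration of how these helpers are meant to be used (a minimal sketch, not part of the diff; the sample mapping and numbers are invented): str_to_bool parses env-style flags, search_mapping resolves a name in either direction of a mapping, and future_thread_executor runs (callable, args...) tuples on a thread pool.

from src.functions import str_to_bool, search_mapping, future_thread_executor

print(str_to_bool("yes"))   # True
print(str_to_bool("0"))     # False

library_mapping = {"Shows": "TV Shows"}             # hypothetical mapping
print(search_mapping(library_mapping, "Shows"))     # "TV Shows"
print(search_mapping(library_mapping, "TV Shows"))  # "Shows"

def square(x):
    return x * x

# Each entry is (callable, *args); future_thread_executor submits them with submit(*arg).
print(future_thread_executor([(square, 2), (square, 3)], workers=2))  # [4, 9]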
989  src/jellyfin.py
@@ -1,237 +1,752 @@
|
|||||||
import requests, os
|
import asyncio, aiohttp, traceback
|
||||||
from dotenv import load_dotenv
|
|
||||||
from src.functions import logger, search_mapping, str_to_bool, check_skip_logic, generate_library_guids_dict
|
from src.functions import (
|
||||||
|
logger,
|
||||||
load_dotenv(override=True)
|
search_mapping,
|
||||||
|
)
|
||||||
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL")
|
from src.library import (
|
||||||
jellyfin_token = os.getenv("JELLYFIN_TOKEN")
|
check_skip_logic,
|
||||||
|
generate_library_guids_dict,
|
||||||
class Jellyfin():
|
)
|
||||||
def __init__(self):
|
from src.watched import (
|
||||||
self.baseurl = jellyfin_baseurl
|
combine_watched_dicts,
|
||||||
self.token = jellyfin_token
|
)
|
||||||
|
|
||||||
if not self.baseurl:
|
|
||||||
raise Exception("Jellyfin baseurl not set")
|
class Jellyfin:
|
||||||
|
def __init__(self, baseurl, token):
|
||||||
if not self.token:
|
self.baseurl = baseurl
|
||||||
raise Exception("Jellyfin token not set")
|
self.token = token
|
||||||
|
|
||||||
self.users = self.get_users()
|
if not self.baseurl:
|
||||||
|
raise Exception("Jellyfin baseurl not set")
|
||||||
|
|
||||||
def query(self, query, query_type):
|
if not self.token:
|
||||||
try:
|
raise Exception("Jellyfin token not set")
|
||||||
response = None
|
|
||||||
|
self.users = asyncio.run(self.get_users())
|
||||||
if query_type == "get":
|
|
||||||
response = requests.get(self.baseurl + query, headers={"accept":"application/json", "X-Emby-Token": self.token})
|
async def query(self, query, query_type, session, identifiers=None):
|
||||||
|
try:
|
||||||
elif query_type == "post":
|
results = None
|
||||||
authorization = (
|
headers = {"Accept": "application/json", "X-Emby-Token": self.token}
|
||||||
'MediaBrowser , '
|
authorization = (
|
||||||
'Client="other", '
|
"MediaBrowser , "
|
||||||
'Device="script", '
|
'Client="other", '
|
||||||
'DeviceId="script", '
|
'Device="script", '
|
||||||
'Version="0.0.0"'
|
'DeviceId="script", '
|
||||||
)
|
'Version="0.0.0"'
|
||||||
response = requests.post(self.baseurl + query, headers={"accept":"application/json", "X-Emby-Authorization": authorization, "X-Emby-Token": self.token})
|
)
|
||||||
|
headers["X-Emby-Authorization"] = authorization
|
||||||
return response.json()
|
|
||||||
except Exception as e:
|
if query_type == "get":
|
||||||
logger(e, 2)
|
async with session.get(
|
||||||
logger(response, 2)
|
self.baseurl + query, headers=headers
|
||||||
|
) as response:
|
||||||
def get_users(self):
|
if response.status != 200:
|
||||||
users = {}
|
raise Exception(
|
||||||
|
f"Query failed with status {response.status} {response.reason}"
|
||||||
query = "/Users"
|
)
|
||||||
response = self.query(query, "get")
|
results = await response.json()
|
||||||
|
|
||||||
# If response is not empty
|
elif query_type == "post":
|
||||||
if response:
|
async with session.post(
|
||||||
for user in response:
|
self.baseurl + query, headers=headers
|
||||||
users[user["Name"]] = user["Id"]
|
) as response:
|
||||||
|
if response.status != 200:
|
||||||
return users
|
raise Exception(
|
||||||
|
f"Query failed with status {response.status} {response.reason}"
|
||||||
def get_jellyfin_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping=None):
|
)
|
||||||
users_watched = {}
|
results = await response.json()
|
||||||
|
|
||||||
for user_name, user_id in users.items():
|
if not isinstance(results, list) and not isinstance(results, dict):
|
||||||
# Get all libraries
|
raise Exception("Query result is not of type list or dict")
|
||||||
user_name = user_name.lower()
|
|
||||||
|
# append identifiers to results
|
||||||
libraries = self.query(f"/Users/{user_id}/Views", "get")["Items"]
|
if identifiers:
|
||||||
|
results["Identifiers"] = identifiers
|
||||||
for library in libraries:
|
|
||||||
library_title = library["Name"]
|
return results
|
||||||
library_id = library["Id"]
|
|
||||||
watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Filters=IsPlayed&limit=1", "get")
|
except Exception as e:
|
||||||
|
logger(f"Jellyfin: Query {query_type} {query}\nResults {results}\n{e}", 2)
|
||||||
if len(watched["Items"]) == 0:
|
raise Exception(e)
|
||||||
logger(f"Jellyfin: No watched items found in library {library_title}", 1)
|
|
||||||
continue
|
async def get_users(self):
|
||||||
else:
|
try:
|
||||||
library_type = watched["Items"][0]["Type"]
|
users = {}
|
||||||
|
|
||||||
skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)
|
query_string = "/Users"
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
if skip_reason:
|
response = await self.query(query_string, "get", session)
|
||||||
logger(f"Jellyfin: Skipping library {library_title} {skip_reason}", 1)
|
|
||||||
continue
|
# If response is not empty
|
||||||
|
if response:
|
||||||
logger(f"Jellyfin: Generating watched for {user_name} in library {library_title}", 0)
|
for user in response:
|
||||||
# Movies
|
users[user["Name"]] = user["Id"]
|
||||||
if library_type == "Movie":
|
|
||||||
watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Filters=IsPlayed&Fields=ItemCounts,ProviderIds", "get")
|
return users
|
||||||
for movie in watched["Items"]:
|
except Exception as e:
|
||||||
if movie["UserData"]["Played"] == True:
|
logger(f"Jellyfin: Get users failed {e}", 2)
|
||||||
if movie["ProviderIds"]:
|
raise Exception(e)
|
||||||
if user_name not in users_watched:
|
|
||||||
users_watched[user_name] = {}
|
async def get_user_library_watched(
|
||||||
if library_title not in users_watched[user_name]:
|
self, user_name, user_id, library_type, library_id, library_title
|
||||||
users_watched[user_name][library_title] = []
|
):
|
||||||
# Lowercase movie["ProviderIds"] keys
|
try:
|
||||||
movie["ProviderIds"] = {k.lower(): v for k, v in movie["ProviderIds"].items()}
|
user_name = user_name.lower()
|
||||||
users_watched[user_name][library_title].append(movie["ProviderIds"])
|
user_watched = {}
|
||||||
|
user_watched[user_name] = {}
|
||||||
# TV Shows
|
|
||||||
if library_type == "Episode":
|
logger(
|
||||||
watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Fields=ItemCounts,ProviderIds", "get")
|
f"Jellyfin: Generating watched for {user_name} in library {library_title}",
|
||||||
watched_shows = [x for x in watched["Items"] if x["Type"] == "Series"]
|
0,
|
||||||
|
)
|
||||||
for show in watched_shows:
|
|
||||||
show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
|
async with aiohttp.ClientSession() as session:
|
||||||
show_guids["title"] = show["Name"]
|
# Movies
|
||||||
show_guids = frozenset(show_guids.items())
|
if library_type == "Movie":
|
||||||
seasons = self.query(f"/Shows/{show['Id']}/Seasons?userId={user_id}&Fields=ItemCounts,ProviderIds", "get")
|
user_watched[user_name][library_title] = []
|
||||||
if len(seasons["Items"]) > 0:
|
watched = await self.query(
|
||||||
for season in seasons["Items"]:
|
f"/Users/{user_id}/Items"
|
||||||
episodes = self.query(f"/Shows/{show['Id']}/Episodes?seasonId={season['Id']}&userId={user_id}&Fields=ItemCounts,ProviderIds", "get")
|
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||||
if len(episodes["Items"]) > 0:
|
"get",
|
||||||
for episode in episodes["Items"]:
|
session,
|
||||||
if episode["UserData"]["Played"] == True:
|
)
|
||||||
if episode["ProviderIds"]:
|
|
||||||
if user_name not in users_watched:
|
for movie in watched["Items"]:
|
||||||
users_watched[user_name] = {}
|
# Check if the movie has been played
|
||||||
if library_title not in users_watched[user_name]:
|
if (
|
||||||
users_watched[user_name][library_title] = {}
|
movie["UserData"]["Played"] is True
|
||||||
if show_guids not in users_watched[user_name][library_title]:
|
and "MediaSources" in movie
|
||||||
users_watched[user_name][library_title][show_guids] = {}
|
and movie["MediaSources"] is not {}
|
||||||
if season["Name"] not in users_watched[user_name][library_title][show_guids]:
|
):
|
||||||
users_watched[user_name][library_title][show_guids][season["Name"]] = []
|
logger(
|
||||||
|
f"Jellyfin: Adding {movie['Name']} to {user_name} watched list",
|
||||||
# Lowercase episode["ProviderIds"] keys
|
3,
|
||||||
episode["ProviderIds"] = {k.lower(): v for k, v in episode["ProviderIds"].items()}
|
)
|
||||||
users_watched[user_name][library_title][show_guids][season["Name"]].append(episode["ProviderIds"])
|
if "ProviderIds" in movie:
|
||||||
|
logger(
|
||||||
return users_watched
|
f"Jellyfin: {movie['Name']} {movie['ProviderIds']} {movie['MediaSources']}",
|
||||||
|
3,
|
||||||
def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
|
)
|
||||||
for user, libraries in watched_list.items():
|
else:
|
||||||
if user_mapping:
|
logger(
|
||||||
user_other = None
|
f"Jellyfin: {movie['Name']} {movie['MediaSources']['Path']}",
|
||||||
|
3,
|
||||||
if user in user_mapping.keys():
|
)
|
||||||
user_other = user_mapping[user]
|
|
||||||
elif user in user_mapping.values():
|
# Create a dictionary for the movie with its title
|
||||||
user_other = search_mapping(user_mapping, user)
|
movie_guids = {"title": movie["Name"]}
|
||||||
|
|
||||||
if user_other:
|
# If the movie has provider IDs, add them to the dictionary
|
||||||
logger(f"Swapping user {user} with {user_other}", 1)
|
if "ProviderIds" in movie:
|
||||||
user = user_other
|
movie_guids.update(
|
||||||
|
{
|
||||||
user_id = None
|
k.lower(): v
|
||||||
for key in self.users.keys():
|
for k, v in movie["ProviderIds"].items()
|
||||||
if user.lower() == key.lower():
|
}
|
||||||
user_id = self.users[key]
|
)
|
||||||
break
|
|
||||||
|
# If the movie has media sources, add them to the dictionary
|
||||||
if not user_id:
|
if "MediaSources" in movie:
|
||||||
logger(f"{user} not found in Jellyfin", 2)
|
movie_guids["locations"] = tuple(
|
||||||
break
|
[
|
||||||
|
x["Path"].split("/")[-1]
|
||||||
jellyfin_libraries = self.query(f"/Users/{user_id}/Views", "get")["Items"]
|
for x in movie["MediaSources"]
|
||||||
|
]
|
||||||
for library, videos in libraries.items():
|
)
|
||||||
if library_mapping:
|
|
||||||
library_other = None
|
# Append the movie dictionary to the list for the given user and library
|
||||||
|
user_watched[user_name][library_title].append(movie_guids)
|
||||||
if library in library_mapping.keys():
|
logger(
|
||||||
library_other = library_mapping[library]
|
f"Jellyfin: Added {movie_guids} to {user_name} watched list",
|
||||||
elif library in library_mapping.values():
|
3,
|
||||||
library_other = search_mapping(library_mapping, library)
|
)
|
||||||
|
|
||||||
if library_other:
|
# TV Shows
|
||||||
logger(f"Swapping library {library} with {library_other}", 1)
|
if library_type in ["Series", "Episode"]:
|
||||||
library = library_other
|
# Initialize an empty dictionary for the given user and library
|
||||||
|
user_watched[user_name][library_title] = {}
|
||||||
if library not in [x["Name"] for x in jellyfin_libraries]:
|
|
||||||
logger(f"{library} not found in Jellyfin", 2)
|
# Retrieve a list of watched TV shows
|
||||||
continue
|
watched_shows = await self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
library_id = None
|
+ f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
|
||||||
for jellyfin_library in jellyfin_libraries:
|
"get",
|
||||||
if jellyfin_library["Name"] == library:
|
session,
|
||||||
library_id = jellyfin_library["Id"]
|
)
|
||||||
continue
|
|
||||||
|
# Filter the list of shows to only include those that have been partially or fully watched
|
||||||
if library_id:
|
watched_shows_filtered = []
|
||||||
logger(f"Jellyfin: Updating watched for {user} in library {library}", 1)
|
for show in watched_shows["Items"]:
|
||||||
library_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&limit=1", "get")
|
if "PlayedPercentage" in show["UserData"]:
|
||||||
library_type = library_search["Items"][0]["Type"]
|
if show["UserData"]["PlayedPercentage"] > 0:
|
||||||
|
watched_shows_filtered.append(show)
|
||||||
# Movies
|
|
||||||
if library_type == "Movie":
|
# Create a list of tasks to retrieve the seasons of each watched show
|
||||||
_, _, videos_movies_ids = generate_library_guids_dict(videos, 2)
|
seasons_tasks = []
|
||||||
|
for show in watched_shows_filtered:
|
||||||
jellyfin_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}&isPlayed=false&Fields=ItemCounts,ProviderIds", "get")
|
logger(
|
||||||
for jellyfin_video in jellyfin_search["Items"]:
|
f"Jellyfin: Adding {show['Name']} to {user_name} watched list",
|
||||||
if str_to_bool(jellyfin_video["UserData"]["Played"]) == False:
|
3,
|
||||||
jellyfin_video_id = jellyfin_video["Id"]
|
)
|
||||||
|
show_guids = {
|
||||||
for movie_provider_source, movie_provider_id in jellyfin_video["ProviderIds"].items():
|
k.lower(): v for k, v in show["ProviderIds"].items()
|
||||||
if movie_provider_source.lower() in videos_movies_ids:
|
}
|
||||||
if movie_provider_id.lower() in videos_movies_ids[movie_provider_source.lower()]:
|
show_guids["title"] = show["Name"]
|
||||||
msg = f"{jellyfin_video['Name']} as watched for {user} in {library} for Jellyfin"
|
show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
|
||||||
if not dryrun:
|
show_guids = frozenset(show_guids.items())
|
||||||
logger(f"Marking {msg}", 0)
|
show_identifiers = {
|
||||||
self.query(f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}", "post")
|
"show_guids": show_guids,
|
||||||
else:
|
"show_id": show["Id"],
|
||||||
logger(f"Dryrun {msg}", 0)
|
}
|
||||||
break
|
|
||||||
|
season_task = asyncio.ensure_future(
|
||||||
# TV Shows
|
self.query(
|
||||||
if library_type == "Episode":
|
f"/Shows/{show['Id']}/Seasons"
|
||||||
videos_shows_ids, videos_episode_ids, _ = generate_library_guids_dict(videos, 3)
|
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,RecursiveItemCount",
|
||||||
|
"get",
|
||||||
jellyfin_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}&isPlayed=false&Fields=ItemCounts,ProviderIds", "get")
|
session,
|
||||||
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
frozenset(show_identifiers.items()),
|
||||||
|
)
|
||||||
for jellyfin_show in jellyfin_shows:
|
)
|
||||||
show_found = False
|
seasons_tasks.append(season_task)
|
||||||
for show_provider_source, show_provider_id in jellyfin_show["ProviderIds"].items():
|
|
||||||
if show_provider_source.lower() in videos_shows_ids:
|
# Retrieve the seasons for each watched show
|
||||||
if show_provider_id.lower() in videos_shows_ids[show_provider_source.lower()]:
|
seasons_watched = await asyncio.gather(*seasons_tasks)
|
||||||
show_found = True
|
|
||||||
jellyfin_show_id = jellyfin_show["Id"]
|
# Filter the list of seasons to only include those that have been partially or fully watched
|
||||||
jellyfin_episodes = self.query(f"/Shows/{jellyfin_show_id}/Episodes?userId={user_id}&Fields=ItemCounts,ProviderIds", "get")
|
seasons_watched_filtered = []
|
||||||
for jellyfin_episode in jellyfin_episodes["Items"]:
|
for seasons in seasons_watched:
|
||||||
if str_to_bool(jellyfin_episode["UserData"]["Played"]) == False:
|
seasons_watched_filtered_dict = {}
|
||||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
seasons_watched_filtered_dict["Identifiers"] = seasons[
|
||||||
|
"Identifiers"
|
||||||
for episode_provider_source, episode_provider_id in jellyfin_episode["ProviderIds"].items():
|
]
|
||||||
if episode_provider_source.lower() in videos_episode_ids:
|
seasons_watched_filtered_dict["Items"] = []
|
||||||
if episode_provider_id.lower() in videos_episode_ids[episode_provider_source.lower()]:
|
for season in seasons["Items"]:
|
||||||
msg = f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['IndexNumber']} {jellyfin_episode['Name']} as watched for {user} in {library} for Jellyfin"
|
if "PlayedPercentage" in season["UserData"]:
|
||||||
if not dryrun:
|
if season["UserData"]["PlayedPercentage"] > 0:
|
||||||
logger(f"Marked {msg}", 0)
|
seasons_watched_filtered_dict["Items"].append(
|
||||||
self.query(f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}", "post")
|
season
|
||||||
else:
|
)
|
||||||
logger(f"Dryrun {msg}", 0)
|
|
||||||
break
|
if seasons_watched_filtered_dict["Items"]:
|
||||||
|
seasons_watched_filtered.append(
|
||||||
if show_found:
|
seasons_watched_filtered_dict
|
||||||
break
|
)
|
||||||
|
|
||||||
|
# Create a list of tasks to retrieve the episodes of each watched season
|
||||||
|
episodes_tasks = []
|
||||||
|
for seasons in seasons_watched_filtered:
|
||||||
|
if len(seasons["Items"]) > 0:
|
||||||
|
for season in seasons["Items"]:
|
||||||
|
season_identifiers = dict(seasons["Identifiers"])
|
||||||
|
season_identifiers["season_id"] = season["Id"]
|
||||||
|
season_identifiers["season_name"] = season["Name"]
|
||||||
|
episode_task = asyncio.ensure_future(
|
||||||
|
self.query(
|
||||||
|
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
||||||
|
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&isPlayed=true&Fields=ProviderIds,MediaSources",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
frozenset(season_identifiers.items()),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
episodes_tasks.append(episode_task)
|
||||||
|
|
||||||
|
# Retrieve the episodes for each watched season
|
||||||
|
watched_episodes = await asyncio.gather(*episodes_tasks)
|
||||||
|
|
||||||
|
# Iterate through the watched episodes
|
||||||
|
for episodes in watched_episodes:
|
||||||
|
# If the season has any watched episodes
|
||||||
|
if len(episodes["Items"]) > 0:
|
||||||
|
# Create a dictionary for the season with its identifier and episodes
|
||||||
|
season_dict = {}
|
||||||
|
season_dict["Identifiers"] = dict(episodes["Identifiers"])
|
||||||
|
season_dict["Episodes"] = []
|
||||||
|
for episode in episodes["Items"]:
|
||||||
|
if (
|
||||||
|
episode["UserData"]["Played"] is True
|
||||||
|
and "MediaSources" in episode
|
||||||
|
and episode["MediaSources"] is not {}
|
||||||
|
):
|
||||||
|
# Create a dictionary for the episode with its provider IDs and media sources
|
||||||
|
episode_dict = {
|
||||||
|
k.lower(): v
|
||||||
|
for k, v in episode["ProviderIds"].items()
|
||||||
|
}
|
||||||
|
episode_dict["title"] = episode["Name"]
|
||||||
|
episode_dict["locations"] = tuple(
|
||||||
|
[
|
||||||
|
x["Path"].split("/")[-1]
|
||||||
|
for x in episode["MediaSources"]
|
||||||
|
]
|
||||||
|
)
|
||||||
|
# Add the episode dictionary to the season's list of episodes
|
||||||
|
season_dict["Episodes"].append(episode_dict)
|
||||||
|
# Add the season dictionary to the show's list of seasons
|
||||||
|
if (
|
||||||
|
season_dict["Identifiers"]["show_guids"]
|
||||||
|
not in user_watched[user_name][library_title]
|
||||||
|
):
|
||||||
|
user_watched[user_name][library_title][
|
||||||
|
season_dict["Identifiers"]["show_guids"]
|
||||||
|
] = {}
|
||||||
|
|
||||||
|
if (
|
||||||
|
season_dict["Identifiers"]["season_name"]
|
||||||
|
not in user_watched[user_name][library_title][
|
||||||
|
season_dict["Identifiers"]["show_guids"]
|
||||||
|
]
|
||||||
|
):
|
||||||
|
user_watched[user_name][library_title][
|
||||||
|
season_dict["Identifiers"]["show_guids"]
|
||||||
|
][season_dict["Identifiers"]["season_name"]] = []
|
||||||
|
|
||||||
|
user_watched[user_name][library_title][
|
||||||
|
season_dict["Identifiers"]["show_guids"]
|
||||||
|
][season_dict["Identifiers"]["season_name"]] = season_dict[
|
||||||
|
"Episodes"
|
||||||
|
]
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Added {season_dict['Episodes']} to {user_name} {season_dict['Identifiers']['show_guids']} watched list",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Got watched for {user_name} in library {library_title}", 1
|
||||||
|
)
|
||||||
|
if library_title in user_watched[user_name]:
|
||||||
|
logger(f"Jellyfin: {user_watched[user_name][library_title]}", 3)
|
||||||
|
|
||||||
|
return user_watched
|
||||||
|
except Exception as e:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger(traceback.format_exc(), 2)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
async def get_users_watched(
|
||||||
|
self,
|
||||||
|
user_name,
|
||||||
|
user_id,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
# Get all libraries
|
||||||
|
user_name = user_name.lower()
|
||||||
|
tasks_watched = []
|
||||||
|
|
||||||
|
tasks_libraries = []
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
libraries = await self.query(f"/Users/{user_id}/Views", "get", session)
|
||||||
|
for library in libraries["Items"]:
|
||||||
|
library_id = library["Id"]
|
||||||
|
library_title = library["Name"]
|
||||||
|
identifiers = {
|
||||||
|
"library_id": library_id,
|
||||||
|
"library_title": library_title,
|
||||||
|
}
|
||||||
|
task = asyncio.ensure_future(
|
||||||
|
self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
|
+ f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
identifiers=identifiers,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tasks_libraries.append(task)
|
||||||
|
|
||||||
|
libraries = await asyncio.gather(
|
||||||
|
*tasks_libraries, return_exceptions=True
|
||||||
|
)
|
||||||
|
|
||||||
|
for watched in libraries:
|
||||||
|
if len(watched["Items"]) == 0:
|
||||||
|
continue
|
||||||
|
|
||||||
|
library_id = watched["Identifiers"]["library_id"]
|
||||||
|
library_title = watched["Identifiers"]["library_title"]
|
||||||
|
# Get all library types excluding "Folder"
|
||||||
|
types = set(
|
||||||
|
[
|
||||||
|
x["Type"]
|
||||||
|
for x in watched["Items"]
|
||||||
|
if x["Type"] in ["Movie", "Series", "Episode"]
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
skip_reason = check_skip_logic(
|
||||||
|
library_title,
|
||||||
|
types,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
if skip_reason:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping library {library_title}: {skip_reason}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# If there are multiple types in library raise error
|
||||||
|
if types is None or len(types) < 1:
|
||||||
|
all_types = set([x["Type"] for x in watched["Items"]])
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping Library {library_title} found types: {types}, all types: {all_types}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
for library_type in types:
|
||||||
|
# Get watched for user
|
||||||
|
task = asyncio.ensure_future(
|
||||||
|
self.get_user_library_watched(
|
||||||
|
user_name,
|
||||||
|
user_id,
|
||||||
|
library_type,
|
||||||
|
library_id,
|
||||||
|
library_title,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
tasks_watched.append(task)
|
||||||
|
|
||||||
|
watched = await asyncio.gather(*tasks_watched, return_exceptions=True)
|
||||||
|
|
||||||
|
return watched
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Jellyfin: Failed to get users watched, Error: {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
async def get_watched(
|
||||||
|
self,
|
||||||
|
users,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping=None,
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
users_watched = {}
|
||||||
|
watched = []
|
||||||
|
|
||||||
|
for user_name, user_id in users.items():
|
||||||
|
watched.append(
|
||||||
|
self.get_users_watched(
|
||||||
|
user_name,
|
||||||
|
user_id,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
watched = await asyncio.gather(*watched, return_exceptions=True)
|
||||||
|
for user_watched in watched:
|
||||||
|
user_watched_temp = combine_watched_dicts(user_watched)
|
||||||
|
for user, user_watched_temp in user_watched_temp.items():
|
||||||
|
if user not in users_watched:
|
||||||
|
users_watched[user] = {}
|
||||||
|
users_watched[user].update(user_watched_temp)
|
||||||
|
|
||||||
|
return users_watched
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
async def update_user_watched(
|
||||||
|
self, user_name, user_id, library, library_id, videos, dryrun
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Updating watched for {user_name} in library {library}", 1
|
||||||
|
)
|
||||||
|
(
|
||||||
|
videos_shows_ids,
|
||||||
|
videos_episodes_ids,
|
||||||
|
videos_movies_ids,
|
||||||
|
) = generate_library_guids_dict(videos)
|
||||||
|
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
if videos_movies_ids:
|
||||||
|
jellyfin_search = await self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
|
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||||
|
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
for jellyfin_video in jellyfin_search["Items"]:
|
||||||
|
movie_found = False
|
||||||
|
|
||||||
|
if "MediaSources" in jellyfin_video:
|
||||||
|
for movie_location in jellyfin_video["MediaSources"]:
|
||||||
|
if (
|
||||||
|
movie_location["Path"].split("/")[-1]
|
||||||
|
in videos_movies_ids["locations"]
|
||||||
|
):
|
||||||
|
movie_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not movie_found:
|
||||||
|
for (
|
||||||
|
movie_provider_source,
|
||||||
|
movie_provider_id,
|
||||||
|
) in jellyfin_video["ProviderIds"].items():
|
||||||
|
if movie_provider_source.lower() in videos_movies_ids:
|
||||||
|
if (
|
||||||
|
movie_provider_id.lower()
|
||||||
|
in videos_movies_ids[
|
||||||
|
movie_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
movie_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if movie_found:
|
||||||
|
jellyfin_video_id = jellyfin_video["Id"]
|
||||||
|
msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
|
||||||
|
if not dryrun:
|
||||||
|
logger(f"Marking {msg}", 0)
|
||||||
|
await self.query(
|
||||||
|
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||||
|
"post",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
# TV Shows
|
||||||
|
if videos_shows_ids and videos_episodes_ids:
|
||||||
|
jellyfin_search = await self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
|
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||||
|
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||||
|
|
||||||
|
for jellyfin_show in jellyfin_shows:
|
||||||
|
show_found = False
|
||||||
|
|
||||||
|
if "Path" in jellyfin_show:
|
||||||
|
if (
|
||||||
|
jellyfin_show["Path"].split("/")[-1]
|
||||||
|
in videos_shows_ids["locations"]
|
||||||
|
):
|
||||||
|
show_found = True
|
||||||
|
|
||||||
|
if not show_found:
|
||||||
|
for show_provider_source, show_provider_id in jellyfin_show[
|
||||||
|
"ProviderIds"
|
||||||
|
].items():
|
||||||
|
if show_provider_source.lower() in videos_shows_ids:
|
||||||
|
if (
|
||||||
|
show_provider_id.lower()
|
||||||
|
in videos_shows_ids[
|
||||||
|
show_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
show_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if show_found:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
jellyfin_show_id = jellyfin_show["Id"]
|
||||||
|
jellyfin_episodes = await self.query(
|
||||||
|
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||||
|
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
|
||||||
|
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||||
|
episode_found = False
|
||||||
|
|
||||||
|
if "MediaSources" in jellyfin_episode:
|
||||||
|
for episode_location in jellyfin_episode[
|
||||||
|
"MediaSources"
|
||||||
|
]:
|
||||||
|
if (
|
||||||
|
episode_location["Path"].split("/")[-1]
|
||||||
|
in videos_episodes_ids["locations"]
|
||||||
|
):
|
||||||
|
episode_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not episode_found:
|
||||||
|
for (
|
||||||
|
episode_provider_source,
|
||||||
|
episode_provider_id,
|
||||||
|
) in jellyfin_episode["ProviderIds"].items():
|
||||||
|
if (
|
||||||
|
episode_provider_source.lower()
|
||||||
|
in videos_episodes_ids
|
||||||
|
):
|
||||||
|
if (
|
||||||
|
episode_provider_id.lower()
|
||||||
|
in videos_episodes_ids[
|
||||||
|
episode_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
episode_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if episode_found:
|
||||||
|
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||||
|
msg = (
|
||||||
|
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['Name']}"
|
||||||
|
+ f" as watched for {user_name} in {library} for Jellyfin"
|
||||||
|
)
|
||||||
|
if not dryrun:
|
||||||
|
logger(f"Marked {msg}", 0)
|
||||||
|
await self.query(
|
||||||
|
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||||
|
"post",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}",
|
||||||
|
3,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}",
|
||||||
|
3,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not videos_movies_ids
|
||||||
|
and not videos_shows_ids
|
||||||
|
and not videos_episodes_ids
|
||||||
|
):
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Error updating watched for {user_name} in library {library}, {e}",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
async def update_watched(
|
||||||
|
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
tasks = []
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
for user, libraries in watched_list.items():
|
||||||
|
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
||||||
|
user_other = None
|
||||||
|
user_name = None
|
||||||
|
if user_mapping:
|
||||||
|
if user in user_mapping.keys():
|
||||||
|
user_other = user_mapping[user]
|
||||||
|
elif user in user_mapping.values():
|
||||||
|
user_other = search_mapping(user_mapping, user)
|
||||||
|
|
||||||
|
user_id = None
|
||||||
|
for key in self.users.keys():
|
||||||
|
if user.lower() == key.lower():
|
||||||
|
user_id = self.users[key]
|
||||||
|
user_name = key
|
||||||
|
break
|
||||||
|
elif user_other and user_other.lower() == key.lower():
|
||||||
|
user_id = self.users[key]
|
||||||
|
user_name = key
|
||||||
|
break
|
||||||
|
|
||||||
|
if not user_id:
|
||||||
|
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||||
|
continue
|
||||||
|
|
||||||
|
jellyfin_libraries = await self.query(
|
||||||
|
f"/Users/{user_id}/Views", "get", session
|
||||||
|
)
|
||||||
|
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||||
|
|
||||||
|
for library, videos in libraries.items():
|
||||||
|
library_other = None
|
||||||
|
if library_mapping:
|
||||||
|
if library in library_mapping.keys():
|
||||||
|
library_other = library_mapping[library]
|
||||||
|
elif library in library_mapping.values():
|
||||||
|
library_other = search_mapping(library_mapping, library)
|
||||||
|
|
||||||
|
if library.lower() not in [
|
||||||
|
x["Name"].lower() for x in jellyfin_libraries
|
||||||
|
]:
|
||||||
|
if library_other:
|
||||||
|
if library_other.lower() in [
|
||||||
|
x["Name"].lower() for x in jellyfin_libraries
|
||||||
|
]:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
library = library_other
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} or {library_other} not found in library list",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} not found in library list",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
library_id = None
|
||||||
|
for jellyfin_library in jellyfin_libraries:
|
||||||
|
if jellyfin_library["Name"] == library:
|
||||||
|
library_id = jellyfin_library["Id"]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if library_id:
|
||||||
|
task = self.update_user_watched(
|
||||||
|
user_name, user_id, library, library_id, videos, dryrun
|
||||||
|
)
|
||||||
|
tasks.append(task)
|
||||||
|
|
||||||
|
await asyncio.gather(*tasks, return_exceptions=True)
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
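The rewritten Jellyfin class above is organized around one pattern: build a list of coroutines with asyncio.ensure_future and run them concurrently with asyncio.gather inside a single aiohttp.ClientSession, sending the X-Emby-Token header on every request. A stripped-down, hedged sketch of that pattern (the base URL, token, and endpoint list here are placeholders, not values from the diff):

import asyncio
import aiohttp

BASEURL = "http://localhost:8096"   # placeholder; normally JELLYFIN_BASEURL
TOKEN = "example-token"             # placeholder; normally JELLYFIN_TOKEN


async def get_json(session, path):
    headers = {"Accept": "application/json", "X-Emby-Token": TOKEN}
    async with session.get(BASEURL + path, headers=headers) as response:
        if response.status != 200:
            raise Exception(f"Query failed with status {response.status}")
        return await response.json()


async def fetch_all(paths):
    # One session shared across all requests; gather preserves input order
    async with aiohttp.ClientSession() as session:
        tasks = [asyncio.ensure_future(get_json(session, p)) for p in paths]
        return await asyncio.gather(*tasks, return_exceptions=True)


if __name__ == "__main__":
    print(asyncio.run(fetch_all(["/Users", "/System/Info/Public"])))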
|||||||
212  src/library.py  Normal file
@@ -0,0 +1,212 @@
from src.functions import (
    logger,
    search_mapping,
)


def check_skip_logic(
    library_title,
    library_type,
    blacklist_library,
    whitelist_library,
    blacklist_library_type,
    whitelist_library_type,
    library_mapping=None,
):
    skip_reason = None
    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)

    skip_reason_black = check_blacklist_logic(
        library_title,
        library_type,
        blacklist_library,
        blacklist_library_type,
        library_other,
    )
    skip_reason_white = check_whitelist_logic(
        library_title,
        library_type,
        whitelist_library,
        whitelist_library_type,
        library_other,
    )

    # Combine skip reasons
    if skip_reason_black:
        skip_reason = skip_reason_black

    if skip_reason_white:
        if skip_reason:
            skip_reason = skip_reason + " and " + skip_reason_white
        else:
            skip_reason = skip_reason_white

    return skip_reason


def check_blacklist_logic(
    library_title,
    library_type,
    blacklist_library,
    blacklist_library_type,
    library_other=None,
):
    skip_reason = None
    if isinstance(library_type, (list, tuple, set)):
        for library_type_item in library_type:
            if library_type_item.lower() in blacklist_library_type:
                skip_reason = f"{library_type_item} is in blacklist_library_type"
    else:
        if library_type.lower() in blacklist_library_type:
            skip_reason = f"{library_type} is in blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        if skip_reason:
            skip_reason = (
                skip_reason + " and " + f"{library_title} is in blacklist_library"
            )
        else:
            skip_reason = f"{library_title} is in blacklist_library"

    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            if skip_reason:
                skip_reason = (
                    skip_reason + " and " + f"{library_other} is in blacklist_library"
                )
            else:
                skip_reason = f"{library_other} is in blacklist_library"

    return skip_reason


def check_whitelist_logic(
    library_title,
    library_type,
    whitelist_library,
    whitelist_library_type,
    library_other=None,
):
    skip_reason = None
    if len(whitelist_library_type) > 0:
        if isinstance(library_type, (list, tuple, set)):
            for library_type_item in library_type:
                if library_type_item.lower() not in whitelist_library_type:
                    skip_reason = (
                        f"{library_type_item} is not in whitelist_library_type"
                    )
        else:
            if library_type.lower() not in whitelist_library_type:
                skip_reason = f"{library_type} is not in whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_other:
            if library_title.lower() not in [
                x.lower() for x in whitelist_library
            ] and library_other.lower() not in [x.lower() for x in whitelist_library]:
                if skip_reason:
                    skip_reason = (
                        skip_reason
                        + " and "
                        + f"{library_title} is not in whitelist_library"
                    )
                else:
                    skip_reason = f"{library_title} is not in whitelist_library"
        else:
            if library_title.lower() not in [x.lower() for x in whitelist_library]:
                if skip_reason:
                    skip_reason = (
                        skip_reason
                        + " and "
                        + f"{library_title} is not in whitelist_library"
                    )
                else:
                    skip_reason = f"{library_title} is not in whitelist_library"

    return skip_reason


def show_title_dict(user_list: dict):
    try:
        show_output_dict = {}

        show_output_keys = user_list.keys()
        show_output_keys = [dict(x) for x in list(show_output_keys)]
        for show_key in show_output_keys:
            for provider_key, provider_value in show_key.items():
                # Skip title
                if provider_key.lower() == "title":
                    continue
                if provider_key.lower() not in show_output_dict:
                    show_output_dict[provider_key.lower()] = []
                if provider_key.lower() == "locations":
                    for show_location in provider_value:
                        show_output_dict[provider_key.lower()].append(show_location)
                else:
                    show_output_dict[provider_key.lower()].append(
                        provider_value.lower()
                    )

        return show_output_dict
    except Exception:
        logger("Generating show_output_dict failed, skipping", 1)
        return {}


def episode_title_dict(user_list: dict):
    try:
        episode_output_dict = {}
        for show in user_list:
            for season in user_list[show]:
                for episode in user_list[show][season]:
                    for episode_key, episode_value in episode.items():
                        if episode_key.lower() not in episode_output_dict:
                            episode_output_dict[episode_key.lower()] = []
                        if episode_key == "locations":
                            for episode_location in episode_value:
                                episode_output_dict[episode_key.lower()].append(
                                    episode_location
                                )
                        else:
                            episode_output_dict[episode_key.lower()].append(
                                episode_value.lower()
                            )

        return episode_output_dict
    except Exception:
        logger("Generating episode_output_dict failed, skipping", 1)
        return {}


def movies_title_dict(user_list: dict):
    try:
        movies_output_dict = {}
        for movie in user_list:
            for movie_key, movie_value in movie.items():
                if movie_key.lower() not in movies_output_dict:
                    movies_output_dict[movie_key.lower()] = []
                if movie_key == "locations":
                    for movie_location in movie_value:
                        movies_output_dict[movie_key.lower()].append(movie_location)
                else:
                    movies_output_dict[movie_key.lower()].append(movie_value.lower())

        return movies_output_dict
    except Exception:
        logger("Generating movies_output_dict failed, skipping", 1)
        return {}


def generate_library_guids_dict(user_list: dict):
    # Handle the case where user_list is empty or does not contain the expected keys and values
    if not user_list:
        return {}, {}, {}

    show_output_dict = show_title_dict(user_list)
    episode_output_dict = episode_title_dict(user_list)
    movies_output_dict = movies_title_dict(user_list)

    return show_output_dict, episode_output_dict, movies_output_dict
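check_skip_logic only reports why a library would be skipped; a None return means the library is synced. A small, hedged example of how a caller might interpret the result (the library names and lists below are invented for illustration):

from src.library import check_skip_logic

reason = check_skip_logic(
    library_title="Anime",
    library_type="Series",
    blacklist_library=["4k movies"],           # hypothetical blacklist
    whitelist_library=[],
    blacklist_library_type=[],
    whitelist_library_type=[],
    library_mapping={"Anime": "Anime Shows"},  # hypothetical mapping
)

if reason:
    print(f"Skipping library: {reason}")
else:
    print("Library will be synced")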
||||||
397  src/main.py  Normal file
@@ -0,0 +1,397 @@
import os, traceback, json, asyncio
from dotenv import load_dotenv
from time import sleep, perf_counter

from src.functions import (
    logger,
    str_to_bool,
)
from src.users import (
    generate_user_list,
    combine_user_lists,
    filter_user_lists,
    generate_server_users,
)
from src.watched import (
    cleanup_watched,
)
from src.black_white import setup_black_white_lists

from src.plex import Plex
from src.jellyfin import Jellyfin

load_dotenv(override=True)


def setup_users(
    server_1, server_2, blacklist_users, whitelist_users, user_mapping=None
):
    server_1_users = generate_user_list(server_1)
    server_2_users = generate_user_list(server_2)

    users = combine_user_lists(server_1_users, server_2_users, user_mapping)
    logger(f"User list that exist on both servers {users}", 1)

    users_filtered = filter_user_lists(users, blacklist_users, whitelist_users)
    logger(f"Filtered user list {users_filtered}", 1)

    output_server_1_users = generate_server_users(server_1, users_filtered)
    output_server_2_users = generate_server_users(server_2, users_filtered)

    # Check if users is none or empty
    if output_server_1_users is None or len(output_server_1_users) == 0:
        raise Exception(
            f"No users found for server 1 {server_1[0]}, users found {users}, filtered users {users_filtered}, server 1 users {server_1[1].users}"
        )

    if output_server_2_users is None or len(output_server_2_users) == 0:
        raise Exception(
            f"No users found for server 2 {server_2[0]}, users found {users} filtered users {users_filtered}, server 2 users {server_2[1].users}"
        )

    logger(f"Server 1 users: {output_server_1_users}", 1)
    logger(f"Server 2 users: {output_server_2_users}", 1)

    return output_server_1_users, output_server_2_users


def generate_server_connections():
    servers = []

    plex_baseurl = os.getenv("PLEX_BASEURL", None)
    plex_token = os.getenv("PLEX_TOKEN", None)
    plex_username = os.getenv("PLEX_USERNAME", None)
    plex_password = os.getenv("PLEX_PASSWORD", None)
    plex_servername = os.getenv("PLEX_SERVERNAME", None)
    ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))

    if plex_baseurl and plex_token:
        plex_baseurl = plex_baseurl.split(",")
        plex_token = plex_token.split(",")

        if len(plex_baseurl) != len(plex_token):
            raise Exception(
                "PLEX_BASEURL and PLEX_TOKEN must have the same number of entries"
            )

        for i, url in enumerate(plex_baseurl):
            servers.append(
                (
                    "plex",
                    Plex(
                        baseurl=url.strip(),
                        token=plex_token[i].strip(),
                        username=None,
                        password=None,
                        servername=None,
                        ssl_bypass=ssl_bypass,
                    ),
                )
            )

    if plex_username and plex_password and plex_servername:
        plex_username = plex_username.split(",")
        plex_password = plex_password.split(",")
        plex_servername = plex_servername.split(",")

        if len(plex_username) != len(plex_password) or len(plex_username) != len(
            plex_servername
        ):
            raise Exception(
                "PLEX_USERNAME, PLEX_PASSWORD and PLEX_SERVERNAME must have the same number of entries"
            )

        for i, username in enumerate(plex_username):
            servers.append(
                (
                    "plex",
                    Plex(
                        baseurl=None,
                        token=None,
                        username=username.strip(),
                        password=plex_password[i].strip(),
                        servername=plex_servername[i].strip(),
                        ssl_bypass=ssl_bypass,
                    ),
                )
            )

    jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", None)
    jellyfin_token = os.getenv("JELLYFIN_TOKEN", None)

    if jellyfin_baseurl and jellyfin_token:
        jellyfin_baseurl = jellyfin_baseurl.split(",")
        jellyfin_token = jellyfin_token.split(",")

        if len(jellyfin_baseurl) != len(jellyfin_token):
            raise Exception(
                "JELLYFIN_BASEURL and JELLYFIN_TOKEN must have the same number of entries"
            )

        for i, baseurl in enumerate(jellyfin_baseurl):
            baseurl = baseurl.strip()
            if baseurl[-1] == "/":
                baseurl = baseurl[:-1]
            servers.append(
                (
                    "jellyfin",
                    Jellyfin(baseurl=baseurl, token=jellyfin_token[i].strip()),
                )
            )

    return servers


def get_server_watched(
    server_connection: list,
    users: dict,
    blacklist_library: list,
    whitelist_library: list,
    blacklist_library_type: list,
    whitelist_library_type: list,
    library_mapping: dict,
):
    if server_connection[0] == "plex":
        return server_connection[1].get_watched(
            users,
            blacklist_library,
            whitelist_library,
            blacklist_library_type,
            whitelist_library_type,
            library_mapping,
        )
    elif server_connection[0] == "jellyfin":
        return asyncio.run(
            server_connection[1].get_watched(
                users,
                blacklist_library,
                whitelist_library,
                blacklist_library_type,
                whitelist_library_type,
                library_mapping,
            )
        )


def update_server_watched(
    server_connection: list,
    server_watched_filtered: dict,
    user_mapping: dict,
    library_mapping: dict,
    dryrun: bool,
):
    if server_connection[0] == "plex":
        server_connection[1].update_watched(
            server_watched_filtered, user_mapping, library_mapping, dryrun
        )
    elif server_connection[0] == "jellyfin":
        asyncio.run(
            server_connection[1].update_watched(
                server_watched_filtered, user_mapping, library_mapping, dryrun
            )
        )


def should_sync_server(server_1_type, server_2_type):
    sync_from_plex_to_jellyfin = str_to_bool(
        os.getenv("SYNC_FROM_PLEX_TO_JELLYFIN", "True")
    )
    sync_from_jelly_to_plex = str_to_bool(
        os.getenv("SYNC_FROM_JELLYFIN_TO_PLEX", "True")
    )
    sync_from_plex_to_plex = str_to_bool(os.getenv("SYNC_FROM_PLEX_TO_PLEX", "True"))
    sync_from_jelly_to_jellyfin = str_to_bool(
        os.getenv("SYNC_FROM_JELLYFIN_TO_JELLYFIN", "True")
    )

    if (
        server_1_type == "plex"
        and server_2_type == "plex"
        and not sync_from_plex_to_plex
    ):
        logger("Sync between plex and plex is disabled", 1)
        return False

    if (
        server_1_type == "plex"
        and server_2_type == "jellyfin"
        and not sync_from_jelly_to_plex
    ):
        logger("Sync from jellyfin to plex disabled", 1)
        return False

    if (
        server_1_type == "jellyfin"
        and server_2_type == "jellyfin"
        and not sync_from_jelly_to_jellyfin
    ):
        logger("Sync between jellyfin and jellyfin is disabled", 1)
        return False

    if (
        server_1_type == "jellyfin"
        and server_2_type == "plex"
        and not sync_from_plex_to_jellyfin
    ):
        logger("Sync from plex to jellyfin is disabled", 1)
        return False

    return True


def main_loop():
    logfile = os.getenv("LOGFILE", "log.log")
    # Delete logfile if it exists
    if os.path.exists(logfile):
        os.remove(logfile)

    dryrun = str_to_bool(os.getenv("DRYRUN", "False"))
    logger(f"Dryrun: {dryrun}", 1)

    user_mapping = os.getenv("USER_MAPPING")
    if user_mapping:
        user_mapping = json.loads(user_mapping.lower())
        logger(f"User Mapping: {user_mapping}", 1)

    library_mapping = os.getenv("LIBRARY_MAPPING")
    if library_mapping:
        library_mapping = json.loads(library_mapping)
        logger(f"Library Mapping: {library_mapping}", 1)

    # Create (black/white)lists
    logger("Creating (black/white)lists", 1)
    blacklist_library = os.getenv("BLACKLIST_LIBRARY", None)
    whitelist_library = os.getenv("WHITELIST_LIBRARY", None)
    blacklist_library_type = os.getenv("BLACKLIST_LIBRARY_TYPE", None)
    whitelist_library_type = os.getenv("WHITELIST_LIBRARY_TYPE", None)
    blacklist_users = os.getenv("BLACKLIST_USERS", None)
    whitelist_users = os.getenv("WHITELIST_USERS", None)

    (
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
        library_mapping,
        user_mapping,
    )

    # Create server connections
    logger("Creating server connections", 1)
    servers = generate_server_connections()

    for server_1 in servers:
        # If server is the final server in the list, then we are done with the loop
        if server_1 == servers[-1]:
            break

        # Start server_2 at the next server in the list
        for server_2 in servers[servers.index(server_1) + 1 :]:
            # Create users list
            logger("Creating users list", 1)
            server_1_users, server_2_users = setup_users(
                server_1, server_2, blacklist_users, whitelist_users, user_mapping
            )

            logger("Creating watched lists", 1)
            server_1_watched = get_server_watched(
                server_1,
                server_1_users,
                blacklist_library,
                whitelist_library,
                blacklist_library_type,
                whitelist_library_type,
                library_mapping,
            )
            logger("Finished creating watched list server 1", 1)
            server_2_watched = get_server_watched(
                server_2,
                server_2_users,
                blacklist_library,
                whitelist_library,
                blacklist_library_type,
                whitelist_library_type,
                library_mapping,
            )
            logger("Finished creating watched list server 2", 1)
            logger(f"Server 1 watched: {server_1_watched}", 3)
            logger(f"Server 2 watched: {server_2_watched}", 3)

            logger("Cleaning Server 1 Watched", 1)
            server_1_watched_filtered = cleanup_watched(
                server_1_watched, server_2_watched, user_mapping, library_mapping
            )

            logger("Cleaning Server 2 Watched", 1)
            server_2_watched_filtered = cleanup_watched(
                server_2_watched, server_1_watched, user_mapping, library_mapping
            )

            logger(
                f"server 1 watched that needs to be synced to server 2:\n{server_1_watched_filtered}",
                1,
            )
            logger(
                f"server 2 watched that needs to be synced to server 1:\n{server_2_watched_filtered}",
                1,
            )

            if should_sync_server(server_1[0], server_2[0]):
                update_server_watched(
                    server_1,
                    server_2_watched_filtered,
                    user_mapping,
                    library_mapping,
                    dryrun,
                )

            if should_sync_server(server_2[0], server_1[0]):
                update_server_watched(
                    server_2,
                    server_1_watched_filtered,
                    user_mapping,
                    library_mapping,
                    dryrun,
                )


def main():
    sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
    times = []
    while True:
        try:
            start = perf_counter()
            main_loop()
            end = perf_counter()
            times.append(end - start)

            if len(times) > 0:
                logger(f"Average time: {sum(times) / len(times)}", 0)

            logger(f"Looping in {sleep_duration}")
            sleep(sleep_duration)

        except Exception as error:
            if isinstance(error, list):
                for message in error:
                    logger(message, log_type=2)
            else:
                logger(error, log_type=2)

            logger(traceback.format_exc(), 2)

            logger(f"Retrying in {sleep_duration}", log_type=0)
            sleep(sleep_duration)

        except KeyboardInterrupt:
            logger("Exiting", log_type=0)
            os._exit(0)
||||||
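Note on the two mapping variables read above: both are parsed with json.loads, so each must be a valid JSON object, and USER_MAPPING is lower-cased before parsing. A minimal sketch of the expected format (the user and library names here are hypothetical, not part of this change):

    import json
    import os

    # Hypothetical example values; any JSON object of "name on server 1": "name on server 2" pairs works.
    os.environ["USER_MAPPING"] = '{"Alice": "alice_jf"}'
    os.environ["LIBRARY_MAPPING"] = '{"Films": "Movies"}'

    user_mapping = os.getenv("USER_MAPPING")
    if user_mapping:
        # Lower-cased before parsing, so keys and values come back lowercase.
        user_mapping = json.loads(user_mapping.lower())  # {'alice': 'alice_jf'}

    library_mapping = os.getenv("LIBRARY_MAPPING")
    if library_mapping:
        library_mapping = json.loads(library_mapping)  # {'Films': 'Movies'}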
674 src/plex.py
@@ -1,238 +1,436 @@
|
|||||||
import re, os
|
import re, requests, os, traceback
|
||||||
from dotenv import load_dotenv
|
from urllib3.poolmanager import PoolManager
|
||||||
|
|
||||||
from src.functions import logger, search_mapping, check_skip_logic, generate_library_guids_dict
|
from plexapi.server import PlexServer
|
||||||
from plexapi.server import PlexServer
|
from plexapi.myplex import MyPlexAccount
|
||||||
from plexapi.myplex import MyPlexAccount
|
|
||||||
|
from src.functions import (
|
||||||
load_dotenv(override=True)
|
logger,
|
||||||
|
search_mapping,
|
||||||
plex_baseurl = os.getenv("PLEX_BASEURL")
|
future_thread_executor,
|
||||||
plex_token = os.getenv("PLEX_TOKEN")
|
)
|
||||||
username = os.getenv("PLEX_USERNAME")
|
from src.library import (
|
||||||
password = os.getenv("PLEX_PASSWORD")
|
check_skip_logic,
|
||||||
servername = os.getenv("PLEX_SERVERNAME")
|
generate_library_guids_dict,
|
||||||
|
)
|
||||||
# class plex accept base url and token and username and password but default with none
|
|
||||||
class Plex:
|
|
||||||
def __init__(self):
|
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||||
self.baseurl = plex_baseurl
|
class HostNameIgnoringAdapter(requests.adapters.HTTPAdapter):
|
||||||
self.token = plex_token
|
def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs):
|
||||||
self.username = username
|
self.poolmanager = PoolManager(
|
||||||
self.password = password
|
num_pools=connections,
|
||||||
self.servername = servername
|
maxsize=maxsize,
|
||||||
self.plex = self.plex_login()
|
block=block,
|
||||||
self.admin_user = self.plex.myPlexAccount()
|
assert_hostname=False,
|
||||||
self.users = self.get_plex_users()
|
**pool_kwargs,
|
||||||
|
)
|
||||||
def plex_login(self):
|
|
||||||
try:
|
|
||||||
if self.baseurl and self.token:
|
def get_user_library_watched_show(show):
|
||||||
# Login via token
|
try:
|
||||||
plex = PlexServer(self.baseurl, self.token)
|
show_guids = {}
|
||||||
elif self.username and self.password and self.servername:
|
for show_guid in show.guids:
|
||||||
# Login via plex account
|
# Extract source and id from guid.id
|
||||||
account = MyPlexAccount(self.username, self.password)
|
m = re.match(r"(.*)://(.*)", show_guid.id)
|
||||||
plex = account.resource(self.servername).connect()
|
show_guid_source, show_guid_id = m.group(1).lower(), m.group(2)
|
||||||
else:
|
show_guids[show_guid_source] = show_guid_id
|
||||||
raise Exception("No complete plex credentials provided")
|
|
||||||
|
show_guids["title"] = show.title
|
||||||
return plex
|
show_guids["locations"] = tuple([x.split("/")[-1] for x in show.locations])
|
||||||
except Exception as e:
|
show_guids = frozenset(show_guids.items())
|
||||||
if self.username or self.password:
|
|
||||||
msg = f"Failed to login via plex account {self.username}"
|
# Get all watched episodes for show
|
||||||
logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
|
episode_guids = {}
|
||||||
else:
|
watched_episodes = show.watched()
|
||||||
logger(f"Plex: Failed to login, Error: {e}", 2)
|
for episode in watched_episodes:
|
||||||
return None
|
episode_guids_temp = {}
|
||||||
|
try:
|
||||||
|
if len(episode.guids) > 0:
|
||||||
def get_plex_users(self):
|
for guid in episode.guids:
|
||||||
users = self.plex.myPlexAccount().users()
|
# Extract after :// from guid.id
|
||||||
|
m = re.match(r"(.*)://(.*)", guid.id)
|
||||||
# append self to users
|
guid_source, guid_id = m.group(1).lower(), m.group(2)
|
||||||
users.append(self.plex.myPlexAccount())
|
episode_guids_temp[guid_source] = guid_id
|
||||||
|
except Exception:
|
||||||
return users
|
logger(
|
||||||
|
f"Plex: Failed to get guids for {episode.title} in {show.title}, Using location only",
|
||||||
def get_plex_user_watched(self, user, library):
|
1,
|
||||||
if self.admin_user == user:
|
)
|
||||||
user_plex = self.plex
|
|
||||||
else:
|
episode_guids_temp["locations"] = tuple(
|
||||||
user_plex = PlexServer(self.baseurl, user.get_token(self.plex.machineIdentifier))
|
[x.split("/")[-1] for x in episode.locations]
|
||||||
|
)
|
||||||
watched = None
|
|
||||||
|
if episode.parentTitle not in episode_guids:
|
||||||
if library.type == "movie":
|
episode_guids[episode.parentTitle] = []
|
||||||
watched = []
|
|
||||||
library_videos = user_plex.library.section(library.title)
|
episode_guids[episode.parentTitle].append(episode_guids_temp)
|
||||||
for video in library_videos.search(unmatched=False, unwatched=False):
|
|
||||||
guids = {}
|
return show_guids, episode_guids
|
||||||
for guid in video.guids:
|
|
||||||
guid_source = re.search(r'(.*)://', guid.id).group(1).lower()
|
except Exception:
|
||||||
guid_id = re.search(r'://(.*)', guid.id).group(1)
|
return {}, {}
|
||||||
guids[guid_source] = guid_id
|
|
||||||
watched.append(guids)
|
|
||||||
|
def get_user_library_watched(user, user_plex, library):
|
||||||
elif library.type == "show":
|
try:
|
||||||
watched = {}
|
user_name = user.title.lower()
|
||||||
library_videos = user_plex.library.section(library.title)
|
user_watched = {}
|
||||||
for show in library_videos.search(unmatched=False, unwatched=False):
|
user_watched[user_name] = {}
|
||||||
show_guids = {}
|
|
||||||
for show_guid in show.guids:
|
logger(
|
||||||
show_guids["title"] = show.title
|
f"Plex: Generating watched for {user_name} in library {library.title}",
|
||||||
# Extract after :// from guid.id
|
0,
|
||||||
show_guid_source = re.search(r'(.*)://', show_guid.id).group(1).lower()
|
)
|
||||||
show_guid_id = re.search(r'://(.*)', show_guid.id).group(1)
|
|
||||||
show_guids[show_guid_source] = show_guid_id
|
library_videos = user_plex.library.section(library.title)
|
||||||
show_guids = frozenset(show_guids.items())
|
|
||||||
|
if library.type == "movie":
|
||||||
for season in show.seasons():
|
user_watched[user_name][library.title] = []
|
||||||
episode_guids = []
|
|
||||||
for episode in season.episodes():
|
for video in library_videos.search(unwatched=False):
|
||||||
if episode.viewCount > 0:
|
logger(f"Plex: Adding {video.title} to {user_name} watched list", 3)
|
||||||
episode_guids_temp = {}
|
logger(f"Plex: {video.title} {video.guids} {video.locations}", 3)
|
||||||
for guid in episode.guids:
|
|
||||||
# Extract after :// from guid.id
|
movie_guids = {}
|
||||||
guid_source = re.search(r'(.*)://', guid.id).group(1).lower()
|
for guid in video.guids:
|
||||||
guid_id = re.search(r'://(.*)', guid.id).group(1)
|
# Extract source and id from guid.id
|
||||||
episode_guids_temp[guid_source] = guid_id
|
m = re.match(r"(.*)://(.*)", guid.id)
|
||||||
|
guid_source, guid_id = m.group(1).lower(), m.group(2)
|
||||||
episode_guids.append(episode_guids_temp)
|
movie_guids[guid_source] = guid_id
|
||||||
|
|
||||||
if episode_guids:
|
movie_guids["title"] = video.title
|
||||||
# append show, season, episode
|
movie_guids["locations"] = tuple(
|
||||||
if show_guids not in watched:
|
[x.split("/")[-1] for x in video.locations]
|
||||||
watched[show_guids] = {}
|
)
|
||||||
if season.title not in watched[show_guids]:
|
|
||||||
watched[show_guids][season.title] = {}
|
user_watched[user_name][library.title].append(movie_guids)
|
||||||
watched[show_guids][season.title] = episode_guids
|
logger(f"Plex: Added {movie_guids} to {user_name} watched list", 3)
|
||||||
|
|
||||||
return watched
|
elif library.type == "show":
|
||||||
|
user_watched[user_name][library.title] = {}
|
||||||
def get_plex_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping):
|
shows = library_videos.search(unwatched=False)
|
||||||
# Get all libraries
|
|
||||||
libraries = self.plex.library.sections()
|
# Parallelize show processing
|
||||||
users_watched = {}
|
args = []
|
||||||
|
for show in shows:
|
||||||
# for not in blacklist
|
args.append([get_user_library_watched_show, show])
|
||||||
for library in libraries:
|
|
||||||
library_title = library.title
|
for show_guids, episode_guids in future_thread_executor(
|
||||||
library_type = library.type
|
args, workers=min(os.cpu_count(), 4)
|
||||||
|
):
|
||||||
skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)
|
if show_guids and episode_guids:
|
||||||
|
# append show, season, episode
|
||||||
if skip_reason:
|
if show_guids not in user_watched[user_name][library.title]:
|
||||||
logger(f"Plex: Skipping library {library_title} {skip_reason}", 1)
|
user_watched[user_name][library.title][show_guids] = {}
|
||||||
continue
|
|
||||||
|
user_watched[user_name][library.title][show_guids] = episode_guids
|
||||||
for user in users:
|
logger(
|
||||||
logger(f"Plex: Generating watched for {user.title} in library {library_title}", 0)
|
f"Plex: Added {episode_guids} to {user_name} {show_guids} watched list",
|
||||||
user_name = user.title.lower()
|
3,
|
||||||
watched = self.get_plex_user_watched(user, library)
|
)
|
||||||
if watched:
|
|
||||||
if user_name not in users_watched:
|
logger(f"Plex: Got watched for {user_name} in library {library.title}", 1)
|
||||||
users_watched[user_name] = {}
|
if library.title in user_watched[user_name]:
|
||||||
if library_title not in users_watched[user_name]:
|
logger(f"Plex: {user_watched[user_name][library.title]}", 3)
|
||||||
users_watched[user_name][library_title] = []
|
|
||||||
users_watched[user_name][library_title] = watched
|
return user_watched
|
||||||
|
except Exception as e:
|
||||||
return users_watched
|
logger(
|
||||||
|
f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
|
||||||
def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
|
2,
|
||||||
for user, libraries in watched_list.items():
|
)
|
||||||
if user_mapping:
|
return {}
|
||||||
user_other = None
|
|
||||||
|
|
||||||
if user in user_mapping.keys():
|
def find_video(plex_search, video_ids):
|
||||||
user_other = user_mapping[user]
|
try:
|
||||||
elif user in user_mapping.values():
|
for location in plex_search.locations:
|
||||||
user_other = search_mapping(user_mapping, user)
|
if location.split("/")[-1] in video_ids["locations"]:
|
||||||
|
return True
|
||||||
if user_other:
|
|
||||||
logger(f"Swapping user {user} with {user_other}", 1)
|
for guid in plex_search.guids:
|
||||||
user = user_other
|
guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
|
||||||
|
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||||
for index, value in enumerate(self.users):
|
|
||||||
if user.lower() == value.title.lower():
|
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||||
user = self.users[index]
|
if guid_source in video_ids.keys():
|
||||||
break
|
if guid_id in video_ids[guid_source]:
|
||||||
|
return True
|
||||||
if self.admin_user == user:
|
|
||||||
user_plex = self.plex
|
return False
|
||||||
else:
|
except Exception:
|
||||||
user_plex = PlexServer(self.baseurl, user.get_token(self.plex.machineIdentifier))
|
return False
|
||||||
|
|
||||||
for library, videos in libraries.items():
|
|
||||||
if library_mapping:
|
def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||||
library_other = None
|
try:
|
||||||
|
logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
|
||||||
if library in library_mapping.keys():
|
(
|
||||||
library_other = library_mapping[library]
|
videos_shows_ids,
|
||||||
elif library in library_mapping.values():
|
videos_episodes_ids,
|
||||||
library_other = search_mapping(library_mapping, library)
|
videos_movies_ids,
|
||||||
|
) = generate_library_guids_dict(videos)
|
||||||
if library_other:
|
logger(
|
||||||
logger(f"Swapping library {library} with {library_other}", 1)
|
f"Plex: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||||
library = library_other
|
1,
|
||||||
|
)
|
||||||
# if library in plex library list
|
|
||||||
library_list = user_plex.library.sections()
|
library_videos = user_plex.library.section(library)
|
||||||
if library.lower() not in [x.title.lower() for x in library_list]:
|
if videos_movies_ids:
|
||||||
logger(f"Library {library} not found in Plex library list", 2)
|
for movies_search in library_videos.search(unwatched=True):
|
||||||
continue
|
if find_video(movies_search, videos_movies_ids):
|
||||||
|
msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
|
||||||
logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
|
if not dryrun:
|
||||||
library_videos = user_plex.library.section(library)
|
logger(f"Marked {msg}", 0)
|
||||||
|
movies_search.markWatched()
|
||||||
if library_videos.type == "movie":
|
else:
|
||||||
_, _, videos_movies_ids = generate_library_guids_dict(videos, 2)
|
logger(f"Dryrun {msg}", 0)
|
||||||
for movies_search in library_videos.search(unmatched=False, unwatched=True):
|
else:
|
||||||
for movie_guid in movies_search.guids:
|
logger(
|
||||||
movie_guid_source = re.search(r'(.*)://', movie_guid.id).group(1).lower()
|
f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}",
|
||||||
movie_guid_id = re.search(r'://(.*)', movie_guid.id).group(1)
|
1,
|
||||||
# If movie provider source and movie provider id are in videos_movie_ids exactly, then the movie is in the list
|
)
|
||||||
if movie_guid_source in videos_movies_ids.keys():
|
|
||||||
if movie_guid_id in videos_movies_ids[movie_guid_source]:
|
if videos_shows_ids and videos_episodes_ids:
|
||||||
if movies_search.viewCount == 0:
|
for show_search in library_videos.search(unwatched=True):
|
||||||
msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
|
if find_video(show_search, videos_shows_ids):
|
||||||
if not dryrun:
|
for episode_search in show_search.episodes():
|
||||||
logger(f"Marked {msg}", 0)
|
if find_video(episode_search, videos_episodes_ids):
|
||||||
movies_search.markWatched()
|
msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
|
||||||
else:
|
if not dryrun:
|
||||||
logger(f"Dryrun {msg}", 0)
|
logger(f"Marked {msg}", 0)
|
||||||
break
|
episode_search.markWatched()
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
elif library_videos.type == "show":
|
else:
|
||||||
videos_shows_ids, videos_episode_ids, _ = generate_library_guids_dict(videos, 3)
|
logger(
|
||||||
|
f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}",
|
||||||
for show_search in library_videos.search(unmatched=False, unwatched=True):
|
3,
|
||||||
show_found = False
|
)
|
||||||
for show_guid in show_search.guids:
|
else:
|
||||||
show_guid_source = re.search(r'(.*)://', show_guid.id).group(1).lower()
|
logger(
|
||||||
show_guid_id = re.search(r'://(.*)', show_guid.id).group(1)
|
f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}",
|
||||||
|
3,
|
||||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
)
|
||||||
if show_guid_source in videos_shows_ids.keys():
|
|
||||||
if show_guid_id in videos_shows_ids[show_guid_source]:
|
if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
|
||||||
show_found = True
|
logger(
|
||||||
for episode_search in show_search.episodes():
|
f"Jellyfin: No videos to mark as watched for {user.title} in library {library}",
|
||||||
for episode_guid in episode_search.guids:
|
1,
|
||||||
episode_guid_source = re.search(r'(.*)://', episode_guid.id).group(1).lower()
|
)
|
||||||
episode_guid_id = re.search(r'://(.*)', episode_guid.id).group(1)
|
|
||||||
|
except Exception as e:
|
||||||
# If episode provider source and episode provider id are in videos_episode_ids exactly, then the episode is in the list
|
logger(
|
||||||
if episode_guid_source in videos_episode_ids.keys():
|
f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}",
|
||||||
if episode_guid_id in videos_episode_ids[episode_guid_source]:
|
2,
|
||||||
if episode_search.viewCount == 0:
|
)
|
||||||
msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
|
logger(traceback.format_exc(), 2)
|
||||||
if not dryrun:
|
|
||||||
logger(f"Marked {msg}", 0)
|
|
||||||
episode_search.markWatched()
|
# class plex accept base url and token and username and password but default with none
|
||||||
else:
|
class Plex:
|
||||||
logger(f"Dryrun {msg}", 0)
|
def __init__(
|
||||||
break
|
self,
|
||||||
|
baseurl=None,
|
||||||
if show_found:
|
token=None,
|
||||||
break
|
username=None,
|
||||||
|
password=None,
|
||||||
|
servername=None,
|
||||||
|
ssl_bypass=False,
|
||||||
|
session=None,
|
||||||
|
):
|
||||||
|
self.baseurl = baseurl
|
||||||
|
self.token = token
|
||||||
|
self.username = username
|
||||||
|
self.password = password
|
||||||
|
self.servername = servername
|
||||||
|
self.ssl_bypass = ssl_bypass
|
||||||
|
if ssl_bypass:
|
||||||
|
# Session for ssl bypass
|
||||||
|
session = requests.Session()
|
||||||
|
# By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||||
|
session.mount("https://", HostNameIgnoringAdapter())
|
||||||
|
self.session = session
|
||||||
|
self.plex = self.login(self.baseurl, self.token)
|
||||||
|
self.admin_user = self.plex.myPlexAccount()
|
||||||
|
self.users = self.get_users()
|
||||||
|
|
||||||
|
def login(self, baseurl, token):
|
||||||
|
try:
|
||||||
|
if baseurl and token:
|
||||||
|
plex = PlexServer(baseurl, token, session=self.session)
|
||||||
|
elif self.username and self.password and self.servername:
|
||||||
|
# Login via plex account
|
||||||
|
account = MyPlexAccount(self.username, self.password)
|
||||||
|
plex = account.resource(self.servername).connect()
|
||||||
|
else:
|
||||||
|
raise Exception("No complete plex credentials provided")
|
||||||
|
|
||||||
|
return plex
|
||||||
|
except Exception as e:
|
||||||
|
if self.username or self.password:
|
||||||
|
msg = f"Failed to login via plex account {self.username}"
|
||||||
|
logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
|
||||||
|
else:
|
||||||
|
logger(f"Plex: Failed to login, Error: {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def get_users(self):
|
||||||
|
try:
|
||||||
|
users = self.plex.myPlexAccount().users()
|
||||||
|
|
||||||
|
# append self to users
|
||||||
|
users.append(self.plex.myPlexAccount())
|
||||||
|
|
||||||
|
return users
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Plex: Failed to get users, Error: {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def get_watched(
|
||||||
|
self,
|
||||||
|
users,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
# Get all libraries
|
||||||
|
users_watched = {}
|
||||||
|
args = []
|
||||||
|
|
||||||
|
for user in users:
|
||||||
|
if self.admin_user == user:
|
||||||
|
user_plex = self.plex
|
||||||
|
else:
|
||||||
|
user_plex = self.login(
|
||||||
|
self.plex._baseurl,
|
||||||
|
user.get_token(self.plex.machineIdentifier),
|
||||||
|
)
|
||||||
|
|
||||||
|
libraries = user_plex.library.sections()
|
||||||
|
|
||||||
|
for library in libraries:
|
||||||
|
library_title = library.title
|
||||||
|
library_type = library.type
|
||||||
|
|
||||||
|
skip_reason = check_skip_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
if skip_reason:
|
||||||
|
logger(
|
||||||
|
f"Plex: Skipping library {library_title}: {skip_reason}", 1
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
args.append([get_user_library_watched, user, user_plex, library])
|
||||||
|
|
||||||
|
for user_watched in future_thread_executor(args):
|
||||||
|
for user, user_watched_temp in user_watched.items():
|
||||||
|
if user not in users_watched:
|
||||||
|
users_watched[user] = {}
|
||||||
|
users_watched[user].update(user_watched_temp)
|
||||||
|
|
||||||
|
return users_watched
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Plex: Failed to get watched, Error: {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
def update_watched(
|
||||||
|
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
args = []
|
||||||
|
|
||||||
|
for user, libraries in watched_list.items():
|
||||||
|
user_other = None
|
||||||
|
# If type of user is dict
|
||||||
|
if user_mapping:
|
||||||
|
if user in user_mapping.keys():
|
||||||
|
user_other = user_mapping[user]
|
||||||
|
elif user in user_mapping.values():
|
||||||
|
user_other = search_mapping(user_mapping, user)
|
||||||
|
|
||||||
|
for index, value in enumerate(self.users):
|
||||||
|
if user.lower() == value.title.lower():
|
||||||
|
user = self.users[index]
|
||||||
|
break
|
||||||
|
elif user_other and user_other.lower() == value.title.lower():
|
||||||
|
user = self.users[index]
|
||||||
|
break
|
||||||
|
|
||||||
|
if self.admin_user == user:
|
||||||
|
user_plex = self.plex
|
||||||
|
else:
|
||||||
|
if isinstance(user, str):
|
||||||
|
logger(
|
||||||
|
f"Plex: {user} is not a plex object, attempting to get object for user",
|
||||||
|
4,
|
||||||
|
)
|
||||||
|
user = self.plex.myPlexAccount().user(user)
|
||||||
|
|
||||||
|
user_plex = PlexServer(
|
||||||
|
self.plex._baseurl,
|
||||||
|
user.get_token(self.plex.machineIdentifier),
|
||||||
|
session=self.session,
|
||||||
|
)
|
||||||
|
|
||||||
|
for library, videos in libraries.items():
|
||||||
|
library_other = None
|
||||||
|
if library_mapping:
|
||||||
|
if library in library_mapping.keys():
|
||||||
|
library_other = library_mapping[library]
|
||||||
|
elif library in library_mapping.values():
|
||||||
|
library_other = search_mapping(library_mapping, library)
|
||||||
|
|
||||||
|
# if library in plex library list
|
||||||
|
library_list = user_plex.library.sections()
|
||||||
|
if library.lower() not in [x.title.lower() for x in library_list]:
|
||||||
|
if library_other:
|
||||||
|
if library_other.lower() in [
|
||||||
|
x.title.lower() for x in library_list
|
||||||
|
]:
|
||||||
|
logger(
|
||||||
|
f"Plex: Library {library} not found, but {library_other} found, using {library_other}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
library = library_other
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Plex: Library {library} or {library_other} not found in library list",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Plex: Library {library} not found in library list",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
args.append(
|
||||||
|
[
|
||||||
|
update_user_watched,
|
||||||
|
user,
|
||||||
|
user_plex,
|
||||||
|
library,
|
||||||
|
videos,
|
||||||
|
dryrun,
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
future_thread_executor(args)
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Plex: Failed to update watched, Error: {e}", 2)
|
||||||
|
raise Exception(e)
83 src/users.py Normal file
@@ -0,0 +1,83 @@
from src.functions import (
    logger,
    search_mapping,
)


def generate_user_list(server):
    # generate list of users from server 1 and server 2
    server_type = server[0]
    server_connection = server[1]

    server_users = []
    if server_type == "plex":
        server_users = [x.title.lower() for x in server_connection.users]
    elif server_type == "jellyfin":
        server_users = [key.lower() for key in server_connection.users.keys()]

    return server_users


def combine_user_lists(server_1_users, server_2_users, user_mapping):
    # combined list of overlapping users from plex and jellyfin
    users = {}

    for server_1_user in server_1_users:
        if user_mapping:
            mapped_user = search_mapping(user_mapping, server_1_user)
            if mapped_user in server_2_users:
                users[server_1_user] = mapped_user
                continue

        if server_1_user in server_2_users:
            users[server_1_user] = server_1_user

    for server_2_user in server_2_users:
        if user_mapping:
            mapped_user = search_mapping(user_mapping, server_2_user)
            if mapped_user in server_1_users:
                users[mapped_user] = server_2_user
                continue

        if server_2_user in server_1_users:
            users[server_2_user] = server_2_user

    return users


def filter_user_lists(users, blacklist_users, whitelist_users):
    users_filtered = {}
    for user in users:
        # whitelist_user is not empty and user lowercase is not in whitelist lowercase
        if len(whitelist_users) > 0:
            if user not in whitelist_users and users[user] not in whitelist_users:
                logger(f"{user} or {users[user]} is not in whitelist", 1)
                continue

        if user not in blacklist_users and users[user] not in blacklist_users:
            users_filtered[user] = users[user]

    return users_filtered


def generate_server_users(server, users):
    server_users = None

    if server[0] == "plex":
        server_users = []
        for plex_user in server[1].users:
            if (
                plex_user.title.lower() in users.keys()
                or plex_user.title.lower() in users.values()
            ):
                server_users.append(plex_user)
    elif server[0] == "jellyfin":
        server_users = {}
        for jellyfin_user, jellyfin_id in server[1].users.items():
            if (
                jellyfin_user.lower() in users.keys()
                or jellyfin_user.lower() in users.values()
            ):
                server_users[jellyfin_user] = jellyfin_id

    return server_users
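As a quick orientation for src/users.py above (the real coverage is in test/test_users.py further down): combine_user_lists returns a dict keyed by the server-1 name with the matching server-2 name as the value, and filter_user_lists then applies the black/whitelists to both sides of each pair. A hedged sketch with made-up usernames, assuming search_mapping returns None for names that are not in the mapping:

    from src.users import combine_user_lists, filter_user_lists

    # Hypothetical usernames; "carol" only exists on server 2 as "carol_jf" via the mapping.
    server_1_users = ["alice", "bob", "carol"]
    server_2_users = ["alice", "bob", "carol_jf"]
    user_mapping = {"carol_jf": "carol"}

    users = combine_user_lists(server_1_users, server_2_users, user_mapping)
    # -> {"alice": "alice", "bob": "bob", "carol": "carol_jf"}

    filtered = filter_user_lists(users, blacklist_users=["bob"], whitelist_users=[])
    # -> {"alice": "alice", "carol": "carol_jf"}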
192 src/watched.py Normal file
@@ -0,0 +1,192 @@
import copy

from src.functions import (
    logger,
    search_mapping,
)

from src.library import generate_library_guids_dict


def combine_watched_dicts(dicts: list):
    combined_dict = {}
    for single_dict in dicts:
        for key, value in single_dict.items():
            if key not in combined_dict:
                combined_dict[key] = {}
            for subkey, subvalue in value.items():
                if subkey in combined_dict[key]:
                    # If the subkey already exists in the combined dictionary,
                    # check if the values are different and raise an exception if they are
                    if combined_dict[key][subkey] != subvalue:
                        raise ValueError(
                            f"Conflicting values for subkey '{subkey}' under key '{key}'"
                        )
                else:
                    # If the subkey does not exist in the combined dictionary, add it
                    combined_dict[key][subkey] = subvalue

    return combined_dict


def cleanup_watched(
    watched_list_1, watched_list_2, user_mapping=None, library_mapping=None
):
    modified_watched_list_1 = copy.deepcopy(watched_list_1)

    # remove entries from watched_list_1 that are in watched_list_2
    for user_1 in watched_list_1:
        user_other = None
        if user_mapping:
            user_other = search_mapping(user_mapping, user_1)
        user_2 = get_other(watched_list_2, user_1, user_other)
        if user_2 is None:
            continue

        for library_1 in watched_list_1[user_1]:
            library_other = None
            if library_mapping:
                library_other = search_mapping(library_mapping, library_1)
            library_2 = get_other(watched_list_2[user_2], library_1, library_other)
            if library_2 is None:
                continue

            (
                _,
                episode_watched_list_2_keys_dict,
                movies_watched_list_2_keys_dict,
            ) = generate_library_guids_dict(watched_list_2[user_2][library_2])

            # Movies
            if isinstance(watched_list_1[user_1][library_1], list):
                for movie in watched_list_1[user_1][library_1]:
                    if is_movie_in_dict(movie, movies_watched_list_2_keys_dict):
                        logger(f"Removing {movie} from {library_1}", 3)
                        modified_watched_list_1[user_1][library_1].remove(movie)

            # TV Shows
            elif isinstance(watched_list_1[user_1][library_1], dict):
                for show_key_1 in watched_list_1[user_1][library_1].keys():
                    show_key_dict = dict(show_key_1)
                    for season in watched_list_1[user_1][library_1][show_key_1]:
                        for episode in watched_list_1[user_1][library_1][show_key_1][
                            season
                        ]:
                            if is_episode_in_dict(
                                episode, episode_watched_list_2_keys_dict
                            ):
                                if (
                                    episode
                                    in modified_watched_list_1[user_1][library_1][
                                        show_key_1
                                    ][season]
                                ):
                                    logger(
                                        f"Removing {episode} from {show_key_dict['title']}",
                                        3,
                                    )
                                    modified_watched_list_1[user_1][library_1][
                                        show_key_1
                                    ][season].remove(episode)

                        # Remove empty seasons
                        if (
                            len(
                                modified_watched_list_1[user_1][library_1][show_key_1][
                                    season
                                ]
                            )
                            == 0
                        ):
                            if (
                                season
                                in modified_watched_list_1[user_1][library_1][
                                    show_key_1
                                ]
                            ):
                                logger(
                                    f"Removing {season} from {show_key_dict['title']} because it is empty",
                                    3,
                                )
                                del modified_watched_list_1[user_1][library_1][
                                    show_key_1
                                ][season]

                    # Remove empty shows
                    if len(modified_watched_list_1[user_1][library_1][show_key_1]) == 0:
                        if show_key_1 in modified_watched_list_1[user_1][library_1]:
                            logger(
                                f"Removing {show_key_dict['title']} because it is empty",
                                3,
                            )
                            del modified_watched_list_1[user_1][library_1][show_key_1]

    for user_1 in watched_list_1:
        for library_1 in watched_list_1[user_1]:
            if library_1 in modified_watched_list_1[user_1]:
                # If library is empty then remove it
                if len(modified_watched_list_1[user_1][library_1]) == 0:
                    logger(f"Removing {library_1} from {user_1} because it is empty", 1)
                    del modified_watched_list_1[user_1][library_1]

        if user_1 in modified_watched_list_1:
            # If user is empty delete user
            if len(modified_watched_list_1[user_1]) == 0:
                logger(f"Removing {user_1} from watched list 1 because it is empty", 1)
                del modified_watched_list_1[user_1]

    return modified_watched_list_1


def get_other(watched_list, object_1, object_2):
    if object_1 in watched_list:
        return object_1
    elif object_2 in watched_list:
        return object_2
    else:
        logger(f"{object_1} and {object_2} not found in watched list 2", 1)
        return None


def is_movie_in_dict(movie, movies_watched_list_2_keys_dict):
    # Iterate through the keys and values of the movie dictionary
    for movie_key, movie_value in movie.items():
        # If the key is "locations", check if the "locations" key is present in the movies_watched_list_2_keys_dict dictionary
        if movie_key == "locations":
            if "locations" in movies_watched_list_2_keys_dict.keys():
                # Iterate through the locations in the movie dictionary
                for location in movie_value:
                    # If the location is in the movies_watched_list_2_keys_dict dictionary, return True
                    if location in movies_watched_list_2_keys_dict["locations"]:
                        return True
        # If the key is not "locations", check if the movie_key is present in the movies_watched_list_2_keys_dict dictionary
        else:
            if movie_key in movies_watched_list_2_keys_dict.keys():
                # If the movie_value is in the movies_watched_list_2_keys_dict dictionary, return True
                if movie_value in movies_watched_list_2_keys_dict[movie_key]:
                    return True

    # If the loop completes without finding a match, return False
    return False


def is_episode_in_dict(episode, episode_watched_list_2_keys_dict):
    # Iterate through the keys and values of the episode dictionary
    for episode_key, episode_value in episode.items():
        # If the key is "locations", check if the "locations" key is present in the episode_watched_list_2_keys_dict dictionary
        if episode_key == "locations":
            if "locations" in episode_watched_list_2_keys_dict.keys():
                # Iterate through the locations in the episode dictionary
                for location in episode_value:
                    # If the location is in the episode_watched_list_2_keys_dict dictionary, return True
                    if location in episode_watched_list_2_keys_dict["locations"]:
                        return True
        # If the key is not "locations", check if the episode_key is present in the episode_watched_list_2_keys_dict dictionary
        else:
            if episode_key in episode_watched_list_2_keys_dict.keys():
                # If the episode_value is in the episode_watched_list_2_keys_dict dictionary, return True
                if episode_value in episode_watched_list_2_keys_dict[episode_key]:
                    return True

    # If the loop completes without finding a match, return False
    return False
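For orientation, cleanup_watched above returns a deep copy of the first watched list with everything already present in the second list stripped out, pruning empty seasons, shows, libraries and users along the way; test/test_watched.py below exercises the full show and episode shapes. A minimal movie-only sketch with hypothetical titles, assuming generate_library_guids_dict keys the second list by provider id and location as in src/library.py:

    from src.watched import cleanup_watched

    # Hypothetical movie entries in the same shape the servers produce:
    # provider ids plus a tuple of file locations.
    watched_list_1 = {
        "alice": {
            "Movies": [
                {"imdb": "tt0000001", "locations": ("Movie A.mkv",)},
                {"imdb": "tt0000002", "locations": ("Movie B.mkv",)},
            ]
        }
    }
    watched_list_2 = {
        "alice": {
            "Movies": [
                {"imdb": "tt0000002", "locations": ("Movie B.mkv",)},
            ]
        }
    }

    # Movie B is already watched on server 2, so only Movie A should be left to sync.
    print(cleanup_watched(watched_list_1, watched_list_2))
    # Expected: {"alice": {"Movies": [{"imdb": "tt0000001", "locations": ("Movie A.mkv",)}]}}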
1 test/requirements.txt Normal file
@@ -0,0 +1 @@
pytest
78 test/test_black_white.py Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# getting the name of the directory
|
||||||
|
# where this file is present.
|
||||||
|
current = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
|
# Getting the parent directory name
|
||||||
|
# where the current directory is present.
|
||||||
|
parent = os.path.dirname(current)
|
||||||
|
|
||||||
|
# adding the parent directory to
|
||||||
|
# the sys.path.
|
||||||
|
sys.path.append(parent)
|
||||||
|
|
||||||
|
from src.black_white import setup_black_white_lists
|
||||||
|
|
||||||
|
|
||||||
|
def test_setup_black_white_lists():
|
||||||
|
# Simple
|
||||||
|
blacklist_library = "library1, library2"
|
||||||
|
whitelist_library = "library1, library2"
|
||||||
|
blacklist_library_type = "library_type1, library_type2"
|
||||||
|
whitelist_library_type = "library_type1, library_type2"
|
||||||
|
blacklist_users = "user1, user2"
|
||||||
|
whitelist_users = "user1, user2"
|
||||||
|
|
||||||
|
(
|
||||||
|
results_blacklist_library,
|
||||||
|
return_whitelist_library,
|
||||||
|
return_blacklist_library_type,
|
||||||
|
return_whitelist_library_type,
|
||||||
|
return_blacklist_users,
|
||||||
|
return_whitelist_users,
|
||||||
|
) = setup_black_white_lists(
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
blacklist_users,
|
||||||
|
whitelist_users,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert results_blacklist_library == ["library1", "library2"]
|
||||||
|
assert return_whitelist_library == ["library1", "library2"]
|
||||||
|
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_blacklist_users == ["user1", "user2"]
|
||||||
|
assert return_whitelist_users == ["user1", "user2"]
|
||||||
|
|
||||||
|
# Library Mapping and user mapping
|
||||||
|
library_mapping = {"library1": "library3"}
|
||||||
|
user_mapping = {"user1": "user3"}
|
||||||
|
|
||||||
|
(
|
||||||
|
results_blacklist_library,
|
||||||
|
return_whitelist_library,
|
||||||
|
return_blacklist_library_type,
|
||||||
|
return_whitelist_library_type,
|
||||||
|
return_blacklist_users,
|
||||||
|
return_whitelist_users,
|
||||||
|
) = setup_black_white_lists(
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
blacklist_users,
|
||||||
|
whitelist_users,
|
||||||
|
library_mapping,
|
||||||
|
user_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert results_blacklist_library == ["library1", "library2", "library3"]
|
||||||
|
assert return_whitelist_library == ["library1", "library2", "library3"]
|
||||||
|
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_blacklist_users == ["user1", "user2", "user3"]
|
||||||
|
assert return_whitelist_users == ["user1", "user2", "user3"]
|
||||||
302 test/test_library.py Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# getting the name of the directory
|
||||||
|
# where this file is present.
|
||||||
|
current = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
|
# Getting the parent directory name
|
||||||
|
# where the current directory is present.
|
||||||
|
parent = os.path.dirname(current)
|
||||||
|
|
||||||
|
# adding the parent directory to
|
||||||
|
# the sys.path.
|
||||||
|
sys.path.append(parent)
|
||||||
|
|
||||||
|
from src.functions import (
|
||||||
|
search_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
from src.library import (
|
||||||
|
check_skip_logic,
|
||||||
|
check_blacklist_logic,
|
||||||
|
check_whitelist_logic,
|
||||||
|
show_title_dict,
|
||||||
|
episode_title_dict,
|
||||||
|
movies_title_dict,
|
||||||
|
generate_library_guids_dict,
|
||||||
|
)
|
||||||
|
|
||||||
|
blacklist_library = ["TV Shows"]
|
||||||
|
whitelist_library = ["Movies"]
|
||||||
|
blacklist_library_type = ["episodes"]
|
||||||
|
whitelist_library_type = ["movies"]
|
||||||
|
library_mapping = {"Shows": "TV Shows", "Movie": "Movies"}
|
||||||
|
|
||||||
|
show_list = {
|
||||||
|
frozenset(
|
||||||
|
{
|
||||||
|
("locations", ("The Last of Us",)),
|
||||||
|
("tmdb", "100088"),
|
||||||
|
("imdb", "tt3581920"),
|
||||||
|
("tvdb", "392256"),
|
||||||
|
("title", "The Last of Us"),
|
||||||
|
}
|
||||||
|
): {
|
||||||
|
"Season 1": [
|
||||||
|
{
|
||||||
|
"imdb": "tt11957006",
|
||||||
|
"tmdb": "2181581",
|
||||||
|
"tvdb": "8444132",
|
||||||
|
"locations": (
|
||||||
|
"The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv",
|
||||||
|
),
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
movie_list = [
|
||||||
|
{
|
||||||
|
"title": "Coco",
|
||||||
|
"imdb": "tt2380307",
|
||||||
|
"tmdb": "354912",
|
||||||
|
"locations": ("Coco (2017) Remux-2160p.mkv", "Coco (2017) Remux-1080p.mkv"),
|
||||||
|
}
|
||||||
|
]
|
||||||
|
|
||||||
|
show_titles = {
|
||||||
|
"imdb": ["tt3581920"],
|
||||||
|
"locations": ["The Last of Us"],
|
||||||
|
"tmdb": ["100088"],
|
||||||
|
"tvdb": ["392256"],
|
||||||
|
}
|
||||||
|
episode_titles = {
|
||||||
|
"imdb": ["tt11957006"],
|
||||||
|
"locations": [
|
||||||
|
"The Last of Us - S01E01 - When You're Lost in the Darkness WEBDL-1080p.mkv"
|
||||||
|
],
|
||||||
|
"tmdb": ["2181581"],
|
||||||
|
"tvdb": ["8444132"],
|
||||||
|
}
|
||||||
|
movie_titles = {
|
||||||
|
"imdb": ["tt2380307"],
|
||||||
|
"locations": ["Coco (2017) Remux-2160p.mkv", "Coco (2017) Remux-1080p.mkv"],
|
||||||
|
"title": ["coco"],
|
||||||
|
"tmdb": ["354912"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_check_skip_logic():
|
||||||
|
# Fails
|
||||||
|
library_title = "Test"
|
||||||
|
library_type = "movies"
|
||||||
|
skip_reason = check_skip_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert skip_reason == "Test is not in whitelist_library"
|
||||||
|
|
||||||
|
library_title = "Shows"
|
||||||
|
library_type = "episodes"
|
||||||
|
skip_reason = check_skip_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
skip_reason
|
||||||
|
== "episodes is in blacklist_library_type and TV Shows is in blacklist_library and "
|
||||||
|
+ "episodes is not in whitelist_library_type and Shows is not in whitelist_library"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Passes
|
||||||
|
library_title = "Movie"
|
||||||
|
library_type = "movies"
|
||||||
|
skip_reason = check_skip_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert skip_reason == None
|
||||||
|
|
||||||
|
|
||||||
|
def test_check_blacklist_logic():
|
||||||
|
# Fails
|
||||||
|
library_title = "Shows"
|
||||||
|
library_type = "episodes"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_blacklist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
skip_reason
|
||||||
|
== "episodes is in blacklist_library_type and TV Shows is in blacklist_library"
|
||||||
|
)
|
||||||
|
|
||||||
|
library_title = "TV Shows"
|
||||||
|
library_type = "episodes"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_blacklist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
skip_reason
|
||||||
|
== "episodes is in blacklist_library_type and TV Shows is in blacklist_library"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Passes
|
||||||
|
library_title = "Movie"
|
||||||
|
library_type = "movies"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_blacklist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert skip_reason == None
|
||||||
|
|
||||||
|
library_title = "Movies"
|
||||||
|
library_type = "movies"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_blacklist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
blacklist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert skip_reason == None
|
||||||
|
|
||||||
|
|
||||||
|
def test_check_whitelist_logic():
|
||||||
|
# Fails
|
||||||
|
library_title = "Shows"
|
||||||
|
library_type = "episodes"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_whitelist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
whitelist_library,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
skip_reason
|
||||||
|
== "episodes is not in whitelist_library_type and Shows is not in whitelist_library"
|
||||||
|
)
|
||||||
|
|
||||||
|
library_title = "TV Shows"
|
||||||
|
library_type = "episodes"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_whitelist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
whitelist_library,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert (
|
||||||
|
skip_reason
|
||||||
|
== "episodes is not in whitelist_library_type and TV Shows is not in whitelist_library"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Passes
|
||||||
|
library_title = "Movie"
|
||||||
|
library_type = "movies"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_whitelist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
whitelist_library,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert skip_reason == None
|
||||||
|
|
||||||
|
library_title = "Movies"
|
||||||
|
library_type = "movies"
|
||||||
|
library_other = search_mapping(library_mapping, library_title)
|
||||||
|
skip_reason = check_whitelist_logic(
|
||||||
|
library_title,
|
||||||
|
library_type,
|
||||||
|
whitelist_library,
|
||||||
|
whitelist_library_type,
|
||||||
|
library_other,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert skip_reason == None
|
||||||
|
|
||||||
|
|
||||||
|
def test_show_title_dict():
|
||||||
|
show_titles_dict = show_title_dict(show_list)
|
||||||
|
|
||||||
|
assert show_titles_dict == show_titles
|
||||||
|
|
||||||
|
|
||||||
|
def test_episode_title_dict():
|
||||||
|
episode_titles_dict = episode_title_dict(show_list)
|
||||||
|
|
||||||
|
assert episode_titles_dict == episode_titles
|
||||||
|
|
||||||
|
|
||||||
|
def test_movies_title_dict():
|
||||||
|
movies_titles_dict = movies_title_dict(movie_list)
|
||||||
|
|
||||||
|
assert movies_titles_dict == movie_titles
|
||||||
|
|
||||||
|
|
||||||
|
def test_generate_library_guids_dict():
|
||||||
|
# Test with shows
|
||||||
|
(
|
||||||
|
show_titles_dict,
|
||||||
|
episode_titles_dict,
|
||||||
|
movies_titles_dict,
|
||||||
|
) = generate_library_guids_dict(show_list)
|
||||||
|
|
||||||
|
assert show_titles_dict == show_titles
|
||||||
|
assert episode_titles_dict == episode_titles
|
||||||
|
assert movies_titles_dict == {}
|
||||||
|
|
||||||
|
# Test with movies
|
||||||
|
(
|
||||||
|
show_titles_dict,
|
||||||
|
episode_titles_dict,
|
||||||
|
movies_titles_dict,
|
||||||
|
) = generate_library_guids_dict(movie_list)
|
||||||
|
|
||||||
|
assert show_titles_dict == {}
|
||||||
|
assert episode_titles_dict == {}
|
||||||
|
assert movies_titles_dict == movie_titles
|
||||||
78 test/test_main.py Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# getting the name of the directory
|
||||||
|
# where this file is present.
|
||||||
|
current = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
|
# Getting the parent directory name
|
||||||
|
# where the current directory is present.
|
||||||
|
parent = os.path.dirname(current)
|
||||||
|
|
||||||
|
# adding the parent directory to
|
||||||
|
# the sys.path.
|
||||||
|
sys.path.append(parent)
|
||||||
|
|
||||||
|
from src.black_white import setup_black_white_lists
|
||||||
|
|
||||||
|
|
||||||
|
def test_setup_black_white_lists():
|
||||||
|
# Simple
|
||||||
|
blacklist_library = "library1, library2"
|
||||||
|
whitelist_library = "library1, library2"
|
||||||
|
blacklist_library_type = "library_type1, library_type2"
|
||||||
|
whitelist_library_type = "library_type1, library_type2"
|
||||||
|
blacklist_users = "user1, user2"
|
||||||
|
whitelist_users = "user1, user2"
|
||||||
|
|
||||||
|
(
|
||||||
|
results_blacklist_library,
|
||||||
|
return_whitelist_library,
|
||||||
|
return_blacklist_library_type,
|
||||||
|
return_whitelist_library_type,
|
||||||
|
return_blacklist_users,
|
||||||
|
return_whitelist_users,
|
||||||
|
) = setup_black_white_lists(
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
blacklist_users,
|
||||||
|
whitelist_users,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert results_blacklist_library == ["library1", "library2"]
|
||||||
|
assert return_whitelist_library == ["library1", "library2"]
|
||||||
|
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_blacklist_users == ["user1", "user2"]
|
||||||
|
assert return_whitelist_users == ["user1", "user2"]
|
||||||
|
|
||||||
|
# Library Mapping and user mapping
|
||||||
|
library_mapping = {"library1": "library3"}
|
||||||
|
user_mapping = {"user1": "user3"}
|
||||||
|
|
||||||
|
(
|
||||||
|
results_blacklist_library,
|
||||||
|
return_whitelist_library,
|
||||||
|
return_blacklist_library_type,
|
||||||
|
return_whitelist_library_type,
|
||||||
|
return_blacklist_users,
|
||||||
|
return_whitelist_users,
|
||||||
|
) = setup_black_white_lists(
|
||||||
|
blacklist_library,
|
||||||
|
whitelist_library,
|
||||||
|
blacklist_library_type,
|
||||||
|
whitelist_library_type,
|
||||||
|
blacklist_users,
|
||||||
|
whitelist_users,
|
||||||
|
library_mapping,
|
||||||
|
user_mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert results_blacklist_library == ["library1", "library2", "library3"]
|
||||||
|
assert return_whitelist_library == ["library1", "library2", "library3"]
|
||||||
|
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||||
|
assert return_blacklist_users == ["user1", "user2", "user3"]
|
||||||
|
assert return_whitelist_users == ["user1", "user2", "user3"]
|
||||||
39 test/test_users.py Normal file
@@ -0,0 +1,39 @@
import sys
import os

# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.users import (
    combine_user_lists,
    filter_user_lists,
)


def test_combine_user_lists():
    server_1_users = ["test", "test3", "luigi311"]
    server_2_users = ["luigi311", "test2", "test3"]
    user_mapping = {"test2": "test"}

    combined = combine_user_lists(server_1_users, server_2_users, user_mapping)

    assert combined == {"luigi311": "luigi311", "test": "test2", "test3": "test3"}


def test_filter_user_lists():
    users = {"luigi311": "luigi311", "test": "test2", "test3": "test3"}
    blacklist_users = ["test3"]
    whitelist_users = ["test", "luigi311"]

    filtered = filter_user_lists(users, blacklist_users, whitelist_users)

    assert filtered == {"test": "test2", "luigi311": "luigi311"}
410 test/test_watched.py Normal file
@@ -0,0 +1,410 @@
|
|||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# getting the name of the directory
|
||||||
|
# where this file is present.
|
||||||
|
current = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
|
# Getting the parent directory name
|
||||||
|
# where the current directory is present.
|
||||||
|
parent = os.path.dirname(current)
|
||||||
|
|
||||||
|
# adding the parent directory to
|
||||||
|
# the sys.path.
|
||||||
|
sys.path.append(parent)
|
||||||
|
|
||||||
|
from src.watched import cleanup_watched, combine_watched_dicts
|
||||||
|
|
||||||
tv_shows_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550489",
                "tmdb": "282843",
                "tvdb": "176357",
                "locations": (
                    "Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
                ),
            },
            {
                "imdb": "tt0550487",
                "tmdb": "282861",
                "tvdb": "300385",
                "locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
            },
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {"locations": ("Test S01E01.mkv",)},
            {"locations": ("Test S01E02.mkv",)},
        ]
    },
}

movies_watched_list_1 = [
    {
        "imdb": "tt2380307",
        "tmdb": "354912",
        "title": "Coco",
        "locations": ("Coco (2017) Remux-1080p.mkv",),
    },
    {
        "tmdbcollection": "448150",
        "imdb": "tt1431045",
        "tmdb": "293660",
        "title": "Deadpool",
        "locations": ("Deadpool (2016) Remux-1080p.mkv",),
    },
]

tv_shows_watched_list_2 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550487",
                "tmdb": "282861",
                "tvdb": "300385",
                "locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
            },
            {
                "imdb": "tt0550498",
                "tmdb": "282865",
                "tvdb": "300474",
                "locations": (
                    "Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
                ),
            },
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {"locations": ("Test S01E02.mkv",)},
            {"locations": ("Test S01E03.mkv",)},
        ]
    },
}

movies_watched_list_2 = [
    {
        "imdb": "tt2380307",
        "tmdb": "354912",
        "title": "Coco",
        "locations": ("Coco (2017) Remux-1080p.mkv",),
    },
    {
        "imdb": "tt0384793",
        "tmdb": "9788",
        "tvdb": "9103",
        "title": "Accepted",
        "locations": ("Accepted (2006) Remux-1080p.mkv",),
    },
]

# Test to see if objects get deleted all the way up to the root.
tv_shows_2_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550489",
                "tmdb": "282843",
                "tvdb": "176357",
                "locations": (
                    "Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
                ),
            },
        ]
    }
}

expected_tv_show_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550489",
                "tmdb": "282843",
                "tvdb": "176357",
                "locations": (
                    "Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
                ),
            }
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E01.mkv",)}]
    },
}

expected_movie_watched_list_1 = [
    {
        "tmdbcollection": "448150",
        "imdb": "tt1431045",
        "tmdb": "293660",
        "title": "Deadpool",
        "locations": ("Deadpool (2016) Remux-1080p.mkv",),
    }
]

expected_tv_show_watched_list_2 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {
                "imdb": "tt0550498",
                "tmdb": "282865",
                "tvdb": "300474",
                "locations": (
                    "Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
                ),
            }
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E03.mkv",)}]
    },
}

expected_movie_watched_list_2 = [
    {
        "imdb": "tt0384793",
        "tmdb": "9788",
        "tvdb": "9103",
        "title": "Accepted",
        "locations": ("Accepted (2006) Remux-1080p.mkv",),
    }
]


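# The tests below expect cleanup_watched(list_a, list_b) to drop from list_a
# everything that is already marked watched in list_b, leaving only the items
# that still need to be synced to the other server.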
def test_simple_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


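# Same comparison as above, but the two servers use different usernames and
# library names, reconciled through user_mapping and library_mapping.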
def test_mapping_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user2": {
            "Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user2": {
            "Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    user_mapping = {"user1": "user2"}
    library_mapping = {"TV Shows": "Shows"}

    return_watched_list_1 = cleanup_watched(
        user_watched_list_1,
        user_watched_list_2,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )
    return_watched_list_2 = cleanup_watched(
        user_watched_list_2,
        user_watched_list_1,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


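# combine_watched_dicts merges a list of per-library watched dicts for the same
# user into a single nested dict, keeping empty libraries as empty entries.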
def test_combine_watched_dicts():
    input_watched = [
        {
            "test3": {
                "Anime Movies": [
                    {
                        "title": "Ponyo",
                        "tmdb": "12429",
                        "imdb": "tt0876563",
                        "locations": ("Ponyo (2008) Bluray-1080p.mkv",),
                    },
                    {
                        "title": "Spirited Away",
                        "tmdb": "129",
                        "imdb": "tt0245429",
                        "locations": ("Spirited Away (2001) Bluray-1080p.mkv",),
                    },
                    {
                        "title": "Castle in the Sky",
                        "tmdb": "10515",
                        "imdb": "tt0092067",
                        "locations": ("Castle in the Sky (1986) Bluray-1080p.mkv",),
                    },
                ]
            }
        },
        {"test3": {"Anime Shows": {}}},
        {"test3": {"Cartoon Shows": {}}},
        {
            "test3": {
                "Shows": {
                    frozenset(
                        {
                            ("tmdb", "64464"),
                            ("tvdb", "301824"),
                            ("tvrage", "45210"),
                            ("title", "11.22.63"),
                            ("locations", ("11.22.63",)),
                            ("imdb", "tt2879552"),
                        }
                    ): {
                        "Season 1": [
                            {
                                "imdb": "tt4460418",
                                "title": "The Rabbit Hole",
                                "locations": (
                                    "11.22.63 S01E01 The Rabbit Hole Bluray-1080p.mkv",
                                ),
                            }
                        ]
                    }
                }
            }
        },
        {"test3": {"Subbed Anime": {}}},
    ]
    expected = {
        "test3": {
            "Anime Movies": [
                {
                    "title": "Ponyo",
                    "tmdb": "12429",
                    "imdb": "tt0876563",
                    "locations": ("Ponyo (2008) Bluray-1080p.mkv",),
                },
                {
                    "title": "Spirited Away",
                    "tmdb": "129",
                    "imdb": "tt0245429",
                    "locations": ("Spirited Away (2001) Bluray-1080p.mkv",),
                },
                {
                    "title": "Castle in the Sky",
                    "tmdb": "10515",
                    "imdb": "tt0092067",
                    "locations": ("Castle in the Sky (1986) Bluray-1080p.mkv",),
                },
            ],
            "Anime Shows": {},
            "Cartoon Shows": {},
            "Shows": {
                frozenset(
                    {
                        ("tmdb", "64464"),
                        ("tvdb", "301824"),
                        ("tvrage", "45210"),
                        ("title", "11.22.63"),
                        ("locations", ("11.22.63",)),
                        ("imdb", "tt2879552"),
                    }
                ): {
                    "Season 1": [
                        {
                            "imdb": "tt4460418",
                            "title": "The Rabbit Hole",
                            "locations": (
                                "11.22.63 S01E01 The Rabbit Hole Bluray-1080p.mkv",
                            ),
                        }
                    ]
                }
            },
            "Subbed Anime": {},
        }
    }

    assert combine_watched_dicts(input_watched) == expected