Compare commits

20 commits

| Author | SHA1 | Date |
|---|---|---|
| | 9ffbc49ad3 | |
| | 644dc8e3af | |
| | 47bc4e94dc | |
| | f17d39fe17 | |
| | 966dcacf8d | |
| | 9afc00443c | |
| | 3ec177ea64 | |
| | b360c9fd0b | |
| | 1ed791b1ed | |
| | f19b1a3063 | |
| | 190a72bd3c | |
| | c848106ce7 | |
| | dd319271bd | |
| | 16879cc728 | |
| | 942ec3533f | |
| | 9f6edfc91a | |
| | 827ace2e97 | |
| | f6b57a1b4d | |
| | 88a7526721 | |
| | 1efb4d8543 | |
.env.sample (85 changed lines)

```diff
@@ -1,42 +1,43 @@
 ## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
 DRYRUN = "True"
 ## Additional logging information
 DEBUG = "True"
 ## Debugging level, "info" is default, "debug" is more verbose
 DEBUG_LEVEL = "info"
 ## How often to run the script in seconds
 SLEEP_DURATION = "3600"
 ## Log file where all output will be written to
 LOGFILE = "log.log"
 ## Map usernames between plex and jellyfin in the event that they are different, order does not matter
 #USER_MAPPING = { "testuser2": "testuser3" }
 ## Map libraries between plex and jellyfin in the even that they are different, order does not matter
 #LIBRARY_MAPPING = { "Shows": "TV Shows" }


 ## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
 ## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
 ## Comma seperated list for multiple servers
 PLEX_BASEURL = "http://localhost:32400"
 ## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
 PLEX_TOKEN = "SuperSecretToken"
 ## If not using plex token then use username and password of the server admin along with the servername
 #PLEX_USERNAME = ""
 #PLEX_PASSWORD = ""
 #PLEX_SERVERNAME = "Plex Server"
+## Skip hostname validation for ssl certificates.
+SSL_BYPASS = "False"
 ## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
 ## Comma seperated list for multiple servers
 JELLYFIN_BASEURL = "http://localhost:8096"
 ## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
 JELLYFIN_TOKEN = "SuperSecretToken"

 ## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
 #BLACKLIST_LIBRARY = ""
 #WHITELIST_LIBRARY = ""
 #BLACKLIST_LIBRARY_TYPE = ""
 #WHITELIST_LIBRARY_TYPE = ""
 #BLACKLIST_USERS = ""
 WHITELIST_USERS = "testuser1,testuser2"
```
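For context, these settings are read at runtime with `os.getenv` and the repository's `str_to_bool` helper (shown later in this diff under `src/functions.py`). A minimal sketch of how the new `SSL_BYPASS` toggle could be consumed; only the variable names come from `.env.sample`, the call site itself is hypothetical:

```python
# Minimal sketch (run from the repository root). The variable names are from
# .env.sample; what is done with ssl_bypass here is an assumption, not the
# project's actual implementation.
import os

from dotenv import load_dotenv

from src.functions import str_to_bool  # helper shown later in this diff

load_dotenv(override=True)

dryrun = str_to_bool(os.getenv("DRYRUN", "True"))
ssl_bypass = str_to_bool(os.getenv("SSL_BYPASS", "False"))

if ssl_bypass:
    # e.g. disable certificate hostname checks on whichever HTTP client is used
    print("SSL hostname validation will be skipped")
```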
.github/workflows/ci.yml (vendored, 172 changed lines; old and new sides of the split view are identical)

```yaml
name: CI

on:
  push:
    paths-ignore:
      - .gitignore
      - "*.md"
  pull_request:
    paths-ignore:
      - .gitignore
      - "*.md"

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: "Install dependencies"
        run: pip install -r requirements.txt && pip install -r test/requirements.txt

      - name: "Run tests"
        run: pytest -vvv

  docker:
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Docker meta
        id: docker_meta
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        if: "${{ env.DOCKER_USERNAME != '' }}"
        uses: docker/metadata-action@v4
        with:
          images: ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched # list of Docker images to use as base name for tags
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Build
        id: build
        if: "${{ steps.docker_meta.outcome == 'skipped' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: false
          tags: jellyplex-watched:action

      - name: Build Push
        id: build_push
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

      # Echo digest so users can validate their image
      - name: Image digest
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        run: echo "${{ steps.build_push.outputs.digest }}"
```
.github/workflows/codeql.yml (vendored, new file, +41 lines)

```yaml
name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    - cron: "23 20 * * 6"

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        language: [ python ]

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          queries: +security-and-quality

      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2
        with:
          category: "/language:${{ matrix.language }}"
```
.gitignore (vendored, 264 changed lines; old and new sides of the split view are identical)

```gitignore
.env
*.prof

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
```
.vscode/launch.json (vendored, 32 changed lines; old and new sides of the split view are identical)

```jsonc
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Main",
            "type": "python",
            "request": "launch",
            "program": "main.py",
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}
```
Dockerfile (76 changed lines)

```diff
@@ -1,35 +1,41 @@
 FROM python:3-slim

 ENV DRYRUN 'True'
 ENV DEBUG 'True'
 ENV DEBUG_LEVEL 'INFO'
 ENV SLEEP_DURATION '3600'
 ENV LOGFILE 'log.log'

 ENV USER_MAPPING '{ "User Test": "User Test2" }'
 ENV LIBRARY_MAPPING '{ "Shows Test": "TV Shows Test" }'

 ENV PLEX_BASEURL 'http://localhost:32400'
 ENV PLEX_TOKEN ''
 ENV PLEX_USERNAME ''
 ENV PLEX_PASSWORD ''
 ENV PLEX_SERVERNAME ''

 ENV JELLYFIN_BASEURL 'http://localhost:8096'
 ENV JELLYFIN_TOKEN ''

 ENV BLACKLIST_LIBRARY ''
 ENV WHITELIST_LIBRARY ''
 ENV BLACKLIST_LIBRARY_TYPE ''
 ENV WHITELIST_LIBRARY_TYPE ''
 ENV BLACKLIST_USERS ''
 ENV WHITELIST_USERS ''

 WORKDIR /app

+RUN apt-get update && \
+    apt-get install -y --no-install-recommends \
+    build-essential && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
 COPY ./requirements.txt ./
 RUN pip install --no-cache-dir -r requirements.txt

 COPY . .

 CMD ["python", "-u", "main.py"]
```
README.md (146 changed lines)

````diff
@@ -1,73 +1,73 @@
 # JellyPlex-Watched

 [](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)

 Sync watched between jellyfin and plex

 ## Description

 Keep in sync all your users watched history between jellyfin and plex servers locally. This uses the imdb ids and any other matching id to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by enterying multiple options in the .env plex/jellyfin section seperated by commas.

 ## Configuration

 ## Installation

 ### Baremetal

 - Setup virtualenv of your choice

 - Install dependencies

 ```bash
 pip install -r requirements.txt
 ```

 - Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens

 - Run

 ```bash
 python main.py
 ```

 ### Docker

 - Build docker image

 ```bash
 docker build -t jellyplex-watched .
 ```

 - or use pre-built image

 ```bash
 docker pull luigi311/jellyplex-watched:latest
 ```

 #### With variables

 - Run

 ```bash
 docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
 ```

 #### With .env

-- Create a .env file similar to .env.sample and set the MNEMONIC variable to your seed phrase
+- Create a .env file similar to .env.sample and set the variables to match your setup

 - Run

 ```bash
 docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
 ```

 ## Contributing

 I am open to recieving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.

 ## License

 This is currently under the GNU General Public License v3.0.
````
main.py (21 changed lines)

```diff
@@ -1,10 +1,11 @@
 import sys

-if __name__ == '__main__':
+if __name__ == "__main__":
     # Check python version 3.6 or higher
     if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
         print("This script requires Python 3.6 or higher")
         sys.exit(1)

     from src.main import main
+
     main()
```
requirements.txt (filename header lost in the extracted view; the entries are the project's runtime dependencies)

```diff
@@ -1,3 +1,4 @@
 plexapi
 requests
 python-dotenv
+aiohttp
```
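`aiohttp` is the new runtime dependency backing the asynchronous Jellyfin client further down in this diff. A minimal sketch of the request pattern it enables; the URL and token below are placeholders, not values taken from this repository:

```python
# Sketch of the aiohttp pattern used by the new async Jellyfin.query():
# open a ClientSession, issue a GET, await the JSON body.
import asyncio

import aiohttp


async def fetch_users(baseurl: str, token: str):
    headers = {"Accept": "application/json", "X-Emby-Token": token}
    async with aiohttp.ClientSession() as session:
        async with session.get(baseurl + "/Users", headers=headers) as response:
            return await response.json()


if __name__ == "__main__":
    # Placeholder values for illustration only.
    users = asyncio.run(fetch_users("http://localhost:8096", "SuperSecretToken"))
    print(users)
```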
src/functions.py (344 changed lines)

Relative to the old version: the `logger()` INFO branch now requires both `DEBUG` and `DEBUG_LEVEL == "info"` (previously either was enough), a WARNING level (`log_type == 4`) is added, the bare `except:`/`pass` blocks in `generate_library_guids_dict()` become `except Exception:` with a logged message, a new `combine_watched_dicts()` helper merges per-library watched dictionaries, and the file is reformatted. The new version:

```python
import os
from concurrent.futures import ThreadPoolExecutor
from dotenv import load_dotenv

load_dotenv(override=True)

logfile = os.getenv("LOGFILE", "log.log")


def logger(message: str, log_type=0):
    debug = str_to_bool(os.getenv("DEBUG", "True"))
    debug_level = os.getenv("DEBUG_LEVEL", "info").lower()

    output = str(message)
    if log_type == 0:
        pass
    elif log_type == 1 and (debug and debug_level == "info"):
        output = f"[INFO]: {output}"
    elif log_type == 2:
        output = f"[ERROR]: {output}"
    elif log_type == 3 and (debug and debug_level == "debug"):
        output = f"[DEBUG]: {output}"
    elif log_type == 4:
        output = f"[WARNING]: {output}"
    else:
        output = None

    if output is not None:
        print(output)
        file = open(logfile, "a", encoding="utf-8")
        file.write(output + "\n")


# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: any) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")


# Get mapped value
def search_mapping(dictionary: dict, key_value: str):
    if key_value in dictionary.keys():
        return dictionary[key_value]
    elif key_value.lower() in dictionary.keys():
        return dictionary[key_value.lower()]
    elif key_value in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value)]
    elif key_value.lower() in dictionary.values():
        return list(dictionary.keys())[
            list(dictionary.values()).index(key_value.lower())
        ]
    else:
        return None


def check_skip_logic(
    library_title,
    library_type,
    blacklist_library,
    whitelist_library,
    blacklist_library_type,
    whitelist_library_type,
    library_mapping,
):
    skip_reason = None

    if library_type.lower() in blacklist_library_type:
        skip_reason = "is blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        skip_reason = "is blacklist_library"

    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)
    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            skip_reason = "is blacklist_library"

    if len(whitelist_library_type) > 0:
        if library_type.lower() not in whitelist_library_type:
            skip_reason = "is not whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_title.lower() not in [x.lower() for x in whitelist_library]:
            skip_reason = "is not whitelist_library"

        if library_other:
            if library_other.lower() not in [x.lower() for x in whitelist_library]:
                skip_reason = "is not whitelist_library"

    return skip_reason


def generate_library_guids_dict(user_list: dict):
    show_output_dict = {}
    episode_output_dict = {}
    movies_output_dict = {}

    try:
        show_output_keys = user_list.keys()
        show_output_keys = [dict(x) for x in list(show_output_keys)]
        for show_key in show_output_keys:
            for provider_key, provider_value in show_key.items():
                # Skip title
                if provider_key.lower() == "title":
                    continue
                if provider_key.lower() not in show_output_dict:
                    show_output_dict[provider_key.lower()] = []
                if provider_key.lower() == "locations":
                    for show_location in provider_value:
                        show_output_dict[provider_key.lower()].append(show_location)
                else:
                    show_output_dict[provider_key.lower()].append(
                        provider_value.lower()
                    )
    except Exception:
        logger("Generating show_output_dict failed, skipping", 1)

    try:
        for show in user_list:
            for season in user_list[show]:
                for episode in user_list[show][season]:
                    for episode_key, episode_value in episode.items():
                        if episode_key.lower() not in episode_output_dict:
                            episode_output_dict[episode_key.lower()] = []
                        if episode_key == "locations":
                            for episode_location in episode_value:
                                episode_output_dict[episode_key.lower()].append(
                                    episode_location
                                )
                        else:
                            episode_output_dict[episode_key.lower()].append(
                                episode_value.lower()
                            )
    except Exception:
        logger("Generating episode_output_dict failed, skipping", 1)

    try:
        for movie in user_list:
            for movie_key, movie_value in movie.items():
                if movie_key.lower() not in movies_output_dict:
                    movies_output_dict[movie_key.lower()] = []
                if movie_key == "locations":
                    for movie_location in movie_value:
                        movies_output_dict[movie_key.lower()].append(movie_location)
                else:
                    movies_output_dict[movie_key.lower()].append(movie_value.lower())
    except Exception:
        logger("Generating movies_output_dict failed, skipping", 1)

    return show_output_dict, episode_output_dict, movies_output_dict


def combine_watched_dicts(dicts: list):
    combined_dict = {}
    for single_dict in dicts:
        for key, value in single_dict.items():
            if key not in combined_dict:
                combined_dict[key] = {}
            for subkey, subvalue in value.items():
                combined_dict[key][subkey] = subvalue

    return combined_dict


def future_thread_executor(args: list, workers: int = -1):
    futures_list = []
    results = []

    if workers == -1:
        workers = min(32, os.cpu_count() * 1.25)

    with ThreadPoolExecutor(max_workers=workers) as executor:
        for arg in args:
            # * arg unpacks the list into actual arguments
            futures_list.append(executor.submit(*arg))

        for future in futures_list:
            try:
                result = future.result()
                results.append(result)
            except Exception as e:
                raise Exception(e)

    return results
```
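A short usage sketch for the two helpers the async client relies on, `combine_watched_dicts` and `future_thread_executor`; the sample data below is made up for illustration and assumes the script is run from the repository root:

```python
from src.functions import combine_watched_dicts, future_thread_executor

# combine_watched_dicts merges per-library results into one dict per user.
partial_results = [
    {"testuser1": {"Movies": [{"title": "Example Movie"}]}},
    {"testuser1": {"Shows": {}}},
]
print(combine_watched_dicts(partial_results))
# {'testuser1': {'Movies': [{'title': 'Example Movie'}], 'Shows': {}}}


# future_thread_executor takes [callable, *args] entries and runs them in a thread pool.
def add(a, b):
    return a + b


print(future_thread_executor([[add, 1, 2], [add, 3, 4]]))  # [3, 7]
```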
src/jellyfin.py (973 changed lines)

Relative to the old synchronous `requests.Session` client: the module now imports `asyncio` and `aiohttp`, `query()` is an async coroutine that takes an `aiohttp` session plus optional identifiers that are attached to the result, `get_users()` and `get_user_watched()` are async, show/season/episode lookups are fanned out with `asyncio.ensure_future` and collected with `asyncio.gather`, a new `get_users_watched()` gathers per-library results for one user, and `get_watched()` merges them with `combine_watched_dicts()`. The TV-show branch now keys off `library_type == "Series"` instead of `"Episode"`. The new version, as far as it appears in this compare view (the diff is cut off inside `update_user_watched`):

```python
import asyncio, aiohttp
from src.functions import (
    logger,
    search_mapping,
    check_skip_logic,
    generate_library_guids_dict,
    combine_watched_dicts,
)


class Jellyfin:
    def __init__(self, baseurl, token):
        self.baseurl = baseurl
        self.token = token

        if not self.baseurl:
            raise Exception("Jellyfin baseurl not set")

        if not self.token:
            raise Exception("Jellyfin token not set")

        self.users = asyncio.run(self.get_users())

    async def query(self, query, query_type, session, identifiers=None):
        try:
            results = None
            headers = {"Accept": "application/json", "X-Emby-Token": self.token}
            authorization = (
                "MediaBrowser , "
                'Client="other", '
                'Device="script", '
                'DeviceId="script", '
                'Version="0.0.0"'
            )
            headers["X-Emby-Authorization"] = authorization

            if query_type == "get":
                async with session.get(
                    self.baseurl + query, headers=headers
                ) as response:
                    results = await response.json()

            elif query_type == "post":
                async with session.post(
                    self.baseurl + query, headers=headers
                ) as response:
                    results = await response.json()

            # append identifiers to results
            if identifiers:
                results["Identifiers"] = identifiers
            return results

        except Exception as e:
            logger(f"Jellyfin: Query failed {e}", 2)
            raise Exception(e)

    async def get_users(self):
        try:
            users = {}

            query_string = "/Users"
            async with aiohttp.ClientSession() as session:
                response = await self.query(query_string, "get", session)

            # If reponse is not empty
            if response:
                for user in response:
                    users[user["Name"]] = user["Id"]

            return users
        except Exception as e:
            logger(f"Jellyfin: Get users failed {e}", 2)
            raise Exception(e)

    async def get_user_watched(
        self, user_name, user_id, library_type, library_id, library_title
    ):
        try:
            user_name = user_name.lower()
            user_watched = {}
            user_watched[user_name] = {}

            logger(
                f"Jellyfin: Generating watched for {user_name} in library {library_title}",
                0,
            )
            # Movies
            async with aiohttp.ClientSession() as session:
                if library_type == "Movie":
                    user_watched[user_name][library_title] = []
                    watched = await self.query(
                        f"/Users/{user_id}/Items"
                        + f"?ParentId={library_id}&Filters=IsPlayed&Fields=ItemCounts,ProviderIds,MediaSources",
                        "get",
                        session,
                    )
                    for movie in watched["Items"]:
                        if movie["UserData"]["Played"] is True:
                            movie_guids = {}
                            movie_guids["title"] = movie["Name"]
                            if "ProviderIds" in movie:
                                # Lowercase movie["ProviderIds"] keys
                                movie_guids = {
                                    k.lower(): v
                                    for k, v in movie["ProviderIds"].items()
                                }
                            if "MediaSources" in movie:
                                movie_guids["locations"] = tuple(
                                    [
                                        x["Path"].split("/")[-1]
                                        for x in movie["MediaSources"]
                                    ]
                                )
                            user_watched[user_name][library_title].append(movie_guids)

                # TV Shows
                if library_type == "Series":
                    user_watched[user_name][library_title] = {}
                    watched_shows = await self.query(
                        f"/Users/{user_id}/Items"
                        + f"?ParentId={library_id}&isPlaceHolder=false&Fields=ProviderIds,Path,RecursiveItemCount",
                        "get",
                        session,
                    )
                    watched_shows_filtered = []
                    for show in watched_shows["Items"]:
                        if "PlayedPercentage" in show["UserData"]:
                            if show["UserData"]["PlayedPercentage"] > 0:
                                watched_shows_filtered.append(show)

                    seasons_tasks = []
                    for show in watched_shows_filtered:
                        show_guids = {
                            k.lower(): v for k, v in show["ProviderIds"].items()
                        }
                        show_guids["title"] = show["Name"]
                        show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
                        show_guids = frozenset(show_guids.items())
                        identifiers = {"show_guids": show_guids, "show_id": show["Id"]}
                        task = asyncio.ensure_future(
                            self.query(
                                f"/Shows/{show['Id']}/Seasons"
                                + f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,RecursiveItemCount",
                                "get",
                                session,
                                frozenset(identifiers.items()),
                            )
                        )
                        seasons_tasks.append(task)

                    seasons_watched = await asyncio.gather(*seasons_tasks)
                    seasons_watched_filtered = []

                    for seasons in seasons_watched:
                        seasons_watched_filtered_dict = {}
                        seasons_watched_filtered_dict["Identifiers"] = seasons[
                            "Identifiers"
                        ]
                        seasons_watched_filtered_dict["Items"] = []
                        for season in seasons["Items"]:
                            if "PlayedPercentage" in season["UserData"]:
                                if season["UserData"]["PlayedPercentage"] > 0:
                                    seasons_watched_filtered_dict["Items"].append(
                                        season
                                    )

                        if seasons_watched_filtered_dict["Items"]:
                            seasons_watched_filtered.append(
                                seasons_watched_filtered_dict
                            )

                    episodes_tasks = []
                    for seasons in seasons_watched_filtered:
                        if len(seasons["Items"]) > 0:
                            for season in seasons["Items"]:
                                season_identifiers = dict(seasons["Identifiers"])
                                season_identifiers["season_id"] = season["Id"]
                                season_identifiers["season_name"] = season["Name"]
                                task = asyncio.ensure_future(
                                    self.query(
                                        f"/Shows/{season_identifiers['show_id']}/Episodes"
                                        + f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&isPlayed=true&Fields=ProviderIds,MediaSources",
                                        "get",
                                        session,
                                        frozenset(season_identifiers.items()),
                                    )
                                )
                                episodes_tasks.append(task)

                    watched_episodes = await asyncio.gather(*episodes_tasks)
                    for episodes in watched_episodes:
                        if len(episodes["Items"]) > 0:
                            for episode in episodes["Items"]:
                                if episode["UserData"]["Played"] is True:
                                    if (
                                        "ProviderIds" in episode
                                        or "MediaSources" in episode
                                    ):
                                        episode_identifiers = dict(
                                            episodes["Identifiers"]
                                        )
                                        show_guids = episode_identifiers["show_guids"]
                                        if (
                                            show_guids
                                            not in user_watched[user_name][library_title]
                                        ):
                                            user_watched[user_name][library_title][
                                                show_guids
                                            ] = {}
                                        if (
                                            episode_identifiers["season_name"]
                                            not in user_watched[user_name][library_title][
                                                show_guids
                                            ]
                                        ):
                                            user_watched[user_name][library_title][
                                                show_guids
                                            ][episode_identifiers["season_name"]] = []

                                        episode_guids = {}
                                        if "ProviderIds" in episode:
                                            episode_guids = {
                                                k.lower(): v
                                                for k, v in episode[
                                                    "ProviderIds"
                                                ].items()
                                            }
                                        if "MediaSources" in episode:
                                            episode_guids["locations"] = tuple(
                                                [
                                                    x["Path"].split("/")[-1]
                                                    for x in episode["MediaSources"]
                                                ]
                                            )
                                        user_watched[user_name][library_title][
                                            show_guids
                                        ][episode_identifiers["season_name"]].append(
                                            episode_guids
                                        )

            return user_watched
        except Exception as e:
            logger(
                f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
                2,
            )
            raise Exception(e)

    async def get_users_watched(
        self,
        user_name,
        user_id,
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        library_mapping,
    ):
        try:
            # Get all libraries
            user_name = user_name.lower()
            tasks_watched = []

            tasks_libraries = []
            async with aiohttp.ClientSession() as session:
                libraries = await self.query(f"/Users/{user_id}/Views", "get", session)
                for library in libraries["Items"]:
                    library_id = library["Id"]
                    library_title = library["Name"]
                    identifiers = {
                        "library_id": library_id,
                        "library_title": library_title,
                    }
                    task = asyncio.ensure_future(
                        self.query(
                            f"/Users/{user_id}/Items"
                            + f"?ParentId={library_id}&Filters=IsPlayed&limit=1",
                            "get",
                            session,
                            identifiers=identifiers,
                        )
                    )
                    tasks_libraries.append(task)

                libraries = await asyncio.gather(
                    *tasks_libraries, return_exceptions=True
                )

                for watched in libraries:
                    if len(watched["Items"]) == 0:
                        continue

                    library_id = watched["Identifiers"]["library_id"]
                    library_title = watched["Identifiers"]["library_title"]
                    library_type = watched["Items"][0]["Type"]

                    skip_reason = check_skip_logic(
                        library_title,
                        library_type,
                        blacklist_library,
                        whitelist_library,
                        blacklist_library_type,
                        whitelist_library_type,
                        library_mapping,
                    )

                    if skip_reason:
                        logger(
                            f"Jellyfin: Skipping library {library_title} {skip_reason}",
                            1,
                        )
                        continue

                    # Get watched for user
                    task = asyncio.ensure_future(
                        self.get_user_watched(
                            user_name, user_id, library_type, library_id, library_title
                        )
                    )
                    tasks_watched.append(task)

            watched = await asyncio.gather(*tasks_watched, return_exceptions=True)
            return watched
        except Exception as e:
            logger(f"Jellyfin: Failed to get users watched, Error: {e}", 2)
            raise Exception(e)

    async def get_watched(
        self,
        users,
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        library_mapping=None,
    ):
        try:
            users_watched = {}
            watched = []

            for user_name, user_id in users.items():
                watched.append(
                    await self.get_users_watched(
                        user_name,
                        user_id,
                        blacklist_library,
                        whitelist_library,
                        blacklist_library_type,
                        whitelist_library_type,
                        library_mapping,
                    )
                )

            for user_watched in watched:
                user_watched_temp = combine_watched_dicts(user_watched)
                for user, user_watched_temp in user_watched_temp.items():
                    if user not in users_watched:
                        users_watched[user] = {}
                    users_watched[user].update(user_watched_temp)

            return users_watched
        except Exception as e:
            logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
            raise Exception(e)

    async def update_user_watched(
        self, user_name, user_id, library, library_id, videos, dryrun
```
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Updating watched for {user_name} in library {library}", 1
|
||||||
|
)
|
||||||
|
(
|
||||||
|
videos_shows_ids,
|
||||||
|
videos_episodes_ids,
|
||||||
|
videos_movies_ids,
|
||||||
|
) = generate_library_guids_dict(videos)
|
||||||
|
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
if videos_movies_ids:
|
||||||
|
jellyfin_search = await self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
|
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||||
|
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
for jellyfin_video in jellyfin_search["Items"]:
|
||||||
|
movie_found = False
|
||||||
|
|
||||||
|
if "MediaSources" in jellyfin_video:
|
||||||
|
for movie_location in jellyfin_video["MediaSources"]:
|
||||||
|
if (
|
||||||
|
movie_location["Path"].split("/")[-1]
|
||||||
|
in videos_movies_ids["locations"]
|
||||||
|
):
|
||||||
|
movie_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not movie_found:
|
||||||
|
for (
|
||||||
|
movie_provider_source,
|
||||||
|
movie_provider_id,
|
||||||
|
) in jellyfin_video["ProviderIds"].items():
|
||||||
|
if movie_provider_source.lower() in videos_movies_ids:
|
||||||
|
if (
|
||||||
|
movie_provider_id.lower()
|
||||||
|
in videos_movies_ids[
|
||||||
|
movie_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
movie_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if movie_found:
|
||||||
|
jellyfin_video_id = jellyfin_video["Id"]
|
||||||
|
msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
|
||||||
|
if not dryrun:
|
||||||
|
logger(f"Marking {msg}", 0)
|
||||||
|
await self.query(
|
||||||
|
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||||
|
"post",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
# TV Shows
|
||||||
|
if videos_shows_ids and videos_episodes_ids:
|
||||||
|
jellyfin_search = await self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
|
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||||
|
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,Path",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||||
|
|
||||||
|
for jellyfin_show in jellyfin_shows:
|
||||||
|
show_found = False
|
||||||
|
|
||||||
|
if "Path" in jellyfin_show:
|
||||||
|
if (
|
||||||
|
jellyfin_show["Path"].split("/")[-1]
|
||||||
|
in videos_shows_ids["locations"]
|
||||||
|
):
|
||||||
|
show_found = True
|
||||||
|
|
||||||
|
if not show_found:
|
||||||
|
for show_provider_source, show_provider_id in jellyfin_show[
|
||||||
|
"ProviderIds"
|
||||||
|
].items():
|
||||||
|
if show_provider_source.lower() in videos_shows_ids:
|
||||||
|
if (
|
||||||
|
show_provider_id.lower()
|
||||||
|
in videos_shows_ids[
|
||||||
|
show_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
show_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if show_found:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
jellyfin_show_id = jellyfin_show["Id"]
|
||||||
|
jellyfin_episodes = await self.query(
|
||||||
|
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||||
|
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
|
||||||
|
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||||
|
episode_found = False
|
||||||
|
|
||||||
|
if "MediaSources" in jellyfin_episode:
|
||||||
|
for episode_location in jellyfin_episode[
|
||||||
|
"MediaSources"
|
||||||
|
]:
|
||||||
|
if (
|
||||||
|
episode_location["Path"].split("/")[-1]
|
||||||
|
in videos_episodes_ids["locations"]
|
||||||
|
):
|
||||||
|
episode_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not episode_found:
|
||||||
|
for (
|
||||||
|
episode_provider_source,
|
||||||
|
episode_provider_id,
|
||||||
|
) in jellyfin_episode["ProviderIds"].items():
|
||||||
|
if (
|
||||||
|
episode_provider_source.lower()
|
||||||
|
in videos_episodes_ids
|
||||||
|
):
|
||||||
|
if (
|
||||||
|
episode_provider_id.lower()
|
||||||
|
in videos_episodes_ids[
|
||||||
|
episode_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
episode_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if episode_found:
|
||||||
|
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||||
|
msg = (
|
||||||
|
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['Name']}"
|
||||||
|
+ f" as watched for {user_name} in {library} for Jellyfin"
|
||||||
|
)
|
||||||
|
if not dryrun:
|
||||||
|
logger(f"Marked {msg}", 0)
|
||||||
|
await self.query(
|
||||||
|
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||||
|
"post",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not videos_movies_ids
|
||||||
|
and not videos_shows_ids
|
||||||
|
and not videos_episodes_ids
|
||||||
|
):
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Error updating watched for {user_name} in library {library}, {e}",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
async def update_watched(
|
||||||
|
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
tasks = []
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
for user, libraries in watched_list.items():
|
||||||
|
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
||||||
|
user_other = None
|
||||||
|
user_name = None
|
||||||
|
if user_mapping:
|
||||||
|
if user in user_mapping.keys():
|
||||||
|
user_other = user_mapping[user]
|
||||||
|
elif user in user_mapping.values():
|
||||||
|
user_other = search_mapping(user_mapping, user)
|
||||||
|
|
||||||
|
user_id = None
|
||||||
|
for key in self.users.keys():
|
||||||
|
if user.lower() == key.lower():
|
||||||
|
user_id = self.users[key]
|
||||||
|
user_name = key
|
||||||
|
break
|
||||||
|
elif user_other and user_other.lower() == key.lower():
|
||||||
|
user_id = self.users[key]
|
||||||
|
user_name = key
|
||||||
|
break
|
||||||
|
|
||||||
|
if not user_id:
|
||||||
|
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||||
|
continue
|
||||||
|
|
||||||
|
jellyfin_libraries = await self.query(
|
||||||
|
f"/Users/{user_id}/Views", "get", session
|
||||||
|
)
|
||||||
|
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||||
|
|
||||||
|
for library, videos in libraries.items():
|
||||||
|
library_other = None
|
||||||
|
if library_mapping:
|
||||||
|
if library in library_mapping.keys():
|
||||||
|
library_other = library_mapping[library]
|
||||||
|
elif library in library_mapping.values():
|
||||||
|
library_other = search_mapping(library_mapping, library)
|
||||||
|
|
||||||
|
if library.lower() not in [
|
||||||
|
x["Name"].lower() for x in jellyfin_libraries
|
||||||
|
]:
|
||||||
|
if library_other:
|
||||||
|
if library_other.lower() in [
|
||||||
|
x["Name"].lower() for x in jellyfin_libraries
|
||||||
|
]:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
library = library_other
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} or {library_other} not found in library list",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} not found in library list",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
library_id = None
|
||||||
|
for jellyfin_library in jellyfin_libraries:
|
||||||
|
if jellyfin_library["Name"] == library:
|
||||||
|
library_id = jellyfin_library["Id"]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if library_id:
|
||||||
|
task = self.update_user_watched(
|
||||||
|
user_name, user_id, library, library_id, videos, dryrun
|
||||||
|
)
|
||||||
|
tasks.append(task)
|
||||||
|
|
||||||
|
await asyncio.gather(*tasks, return_exceptions=True)
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
|||||||
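The async rewrite above leans on one pattern throughout: wrap each per-library or per-user call in asyncio.ensure_future, then collect everything at once with asyncio.gather(..., return_exceptions=True) so a single failing request does not cancel the rest. A minimal, self-contained sketch of that pattern follows; fetch_library and its fake payload are illustrative stand-ins, not part of this repository.

import asyncio

async def fetch_library(session, library_id):
    # Stand-in for Jellyfin.query(); simulate a little network latency.
    await asyncio.sleep(0.1)
    return {"Identifiers": {"library_id": library_id}, "Items": []}

async def main():
    session = None  # the real code passes an aiohttp.ClientSession here
    tasks = [asyncio.ensure_future(fetch_library(session, i)) for i in range(5)]
    # return_exceptions=True keeps one failed library from bringing down the whole batch.
    results = await asyncio.gather(*tasks, return_exceptions=True)
    print(len(results))  # 5

asyncio.run(main())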
1103  src/main.py
File diff suppressed because it is too large
753  src/plex.py
@@ -1,312 +1,441 @@
import re, requests
from urllib3.poolmanager import PoolManager

from plexapi.server import PlexServer
from plexapi.myplex import MyPlexAccount

from src.functions import (
    logger,
    search_mapping,
    check_skip_logic,
    generate_library_guids_dict,
    future_thread_executor,
)

# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
class HostNameIgnoringAdapter(requests.adapters.HTTPAdapter):
    def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs):
        self.poolmanager = PoolManager(
            num_pools=connections,
            maxsize=maxsize,
            block=block,
            assert_hostname=False,
            **pool_kwargs,
        )

def get_user_watched(user, user_plex, library):
    try:
        user_name = user.title.lower()
        user_watched = {}
        user_watched[user_name] = {}

        logger(f"Plex: Generating watched for {user_name} in library {library.title}", 0)

        if library.type == "movie":
            user_watched[user_name][library.title] = []

            library_videos = user_plex.library.section(library.title)
            for video in library_videos.search(unwatched=False):
                movie_guids = {}
                for guid in video.guids:
                    guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
                    guid_id = re.search(r"://(.*)", guid.id).group(1)
                    movie_guids[guid_source] = guid_id

                movie_guids["title"] = video.title
                movie_guids["locations"] = tuple([x.split("/")[-1] for x in video.locations])

                user_watched[user_name][library.title].append(movie_guids)

        elif library.type == "show":
            user_watched[user_name][library.title] = {}

            library_videos = user_plex.library.section(library.title)
            for show in library_videos.search(unwatched=False):
                show_guids = {}
                for show_guid in show.guids:
                    # Extract after :// from guid.id
                    show_guid_source = re.search(r"(.*)://", show_guid.id).group(1).lower()
                    show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)
                    show_guids[show_guid_source] = show_guid_id

                show_guids["title"] = show.title
                show_guids["locations"] = tuple([x.split("/")[-1] for x in show.locations])
                show_guids = frozenset(show_guids.items())

                for season in show.seasons():
                    episode_guids = []
                    for episode in season.episodes():
                        if episode.viewCount > 0:
                            episode_guids_temp = {}
                            for guid in episode.guids:
                                # Extract after :// from guid.id
                                guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
                                guid_id = re.search(r"://(.*)", guid.id).group(1)
                                episode_guids_temp[guid_source] = guid_id

                            episode_guids_temp["locations"] = tuple([x.split("/")[-1] for x in episode.locations])
                            episode_guids.append(episode_guids_temp)

                    if episode_guids:
                        # append show, season, episode
                        if show_guids not in user_watched[user_name][library.title]:
                            user_watched[user_name][library.title][show_guids] = {}
                        if season.title not in user_watched[user_name][library.title][show_guids]:
                            user_watched[user_name][library.title][show_guids][season.title] = {}
                        user_watched[user_name][library.title][show_guids][season.title] = episode_guids

        return user_watched
    except Exception as e:
        logger(f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}", 2)
        raise Exception(e)


def update_user_watched(user, user_plex, library, videos, dryrun):
    try:
        logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
        videos_shows_ids, videos_episodes_ids, videos_movies_ids = generate_library_guids_dict(videos)
        logger(f"Plex: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}", 1)

        library_videos = user_plex.library.section(library)
        if videos_movies_ids:
            for movies_search in library_videos.search(unwatched=True):
                movie_found = False
                for movie_location in movies_search.locations:
                    if movie_location.split("/")[-1] in videos_movies_ids["locations"]:
                        movie_found = True
                        break

                if not movie_found:
                    for movie_guid in movies_search.guids:
                        movie_guid_source = re.search(r"(.*)://", movie_guid.id).group(1).lower()
                        movie_guid_id = re.search(r"://(.*)", movie_guid.id).group(1)

                        # If movie provider source and movie provider id are in videos_movie_ids exactly, then the movie is in the list
                        if movie_guid_source in videos_movies_ids.keys():
                            if movie_guid_id in videos_movies_ids[movie_guid_source]:
                                movie_found = True
                                break

                if movie_found:
                    msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
                    if not dryrun:
                        logger(f"Marked {msg}", 0)
                        movies_search.markWatched()
                    else:
                        logger(f"Dryrun {msg}", 0)
                else:
                    logger(f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}", 1)

        if videos_shows_ids and videos_episodes_ids:
            for show_search in library_videos.search(unwatched=True):
                show_found = False
                for show_location in show_search.locations:
                    if show_location.split("/")[-1] in videos_shows_ids["locations"]:
                        show_found = True
                        break

                if not show_found:
                    for show_guid in show_search.guids:
                        show_guid_source = re.search(r"(.*)://", show_guid.id).group(1).lower()
                        show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)

                        # If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
                        if show_guid_source in videos_shows_ids.keys():
                            if show_guid_id in videos_shows_ids[show_guid_source]:
                                show_found = True
                                break

                if show_found:
                    for episode_search in show_search.episodes():
                        episode_found = False

                        for episode_location in episode_search.locations:
                            if episode_location.split("/")[-1] in videos_episodes_ids["locations"]:
                                episode_found = True
                                break

                        if not episode_found:
                            for episode_guid in episode_search.guids:
                                episode_guid_source = re.search(r"(.*)://", episode_guid.id).group(1).lower()
                                episode_guid_id = re.search(r"://(.*)", episode_guid.id).group(1)

                                # If episode provider source and episode provider id are in videos_episodes_ids exactly, then the episode is in the list
                                if episode_guid_source in videos_episodes_ids.keys():
                                    if episode_guid_id in videos_episodes_ids[episode_guid_source]:
                                        episode_found = True
                                        break

                        if episode_found:
                            msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
                            if not dryrun:
                                logger(f"Marked {msg}", 0)
                                episode_search.markWatched()
                            else:
                                logger(f"Dryrun {msg}", 0)
                        else:
                            logger(f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}", 1)
                else:
                    logger(f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}", 1)

        if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
            logger(f"Jellyfin: No videos to mark as watched for {user.title} in library {library}", 1)

    except Exception as e:
        logger(f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}", 2)
        raise Exception(e)


# class plex accept base url and token and username and password but default with none
class Plex:
    def __init__(
        self,
        baseurl=None,
        token=None,
        username=None,
        password=None,
        servername=None,
        ssl_bypass=False,
    ):
        self.baseurl = baseurl
        self.token = token
        self.username = username
        self.password = password
        self.servername = servername
        self.ssl_bypass = ssl_bypass
        self.plex = self.login(self.baseurl, self.token, ssl_bypass)
        self.admin_user = self.plex.myPlexAccount()
        self.users = self.get_users()

    def login(self, baseurl, token, ssl_bypass=False):
        try:
            if baseurl and token:
                # Login via token
                if ssl_bypass:
                    session = requests.Session()
                    # By pass ssl hostname check https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
                    session.mount("https://", HostNameIgnoringAdapter())
                    plex = PlexServer(baseurl, token, session=session)
                else:
                    plex = PlexServer(baseurl, token)
            elif self.username and self.password and self.servername:
                # Login via plex account
                account = MyPlexAccount(self.username, self.password)
                plex = account.resource(self.servername).connect()
            else:
                raise Exception("No complete plex credentials provided")

            return plex
        except Exception as e:
            if self.username or self.password:
                msg = f"Failed to login via plex account {self.username}"
                logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
            else:
                logger(f"Plex: Failed to login, Error: {e}", 2)
            raise Exception(e)

    def get_users(self):
        try:
            users = self.plex.myPlexAccount().users()

            # append self to users
            users.append(self.plex.myPlexAccount())

            return users
        except Exception as e:
            logger(f"Plex: Failed to get users, Error: {e}", 2)
            raise Exception(e)

    def get_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping):
        try:
            # Get all libraries
            users_watched = {}
            args = []

            for user in users:
                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    user_plex = self.login(
                        self.plex._baseurl, user.get_token(self.plex.machineIdentifier), self.ssl_bypass
                    )

                libraries = user_plex.library.sections()

                for library in libraries:
                    library_title = library.title
                    library_type = library.type

                    skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)

                    if skip_reason:
                        logger(f"Plex: Skipping library {library_title} {skip_reason}", 1)
                        continue

                    args.append([get_user_watched, user, user_plex, library])

            for user_watched in future_thread_executor(args):
                for user, user_watched_temp in user_watched.items():
                    if user not in users_watched:
                        users_watched[user] = {}
                    users_watched[user].update(user_watched_temp)

            return users_watched
        except Exception as e:
            logger(f"Plex: Failed to get watched, Error: {e}", 2)
            raise Exception(e)

    def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
        try:
            args = []

            for user, libraries in watched_list.items():
                user_other = None
                # If type of user is dict
                if user_mapping:
                    if user in user_mapping.keys():
                        user_other = user_mapping[user]
                    elif user in user_mapping.values():
                        user_other = search_mapping(user_mapping, user)

                for index, value in enumerate(self.users):
                    if user.lower() == value.title.lower():
                        user = self.users[index]
                        break
                    elif user_other and user_other.lower() == value.title.lower():
                        user = self.users[index]
                        break

                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    user_plex = PlexServer(
                        self.plex._baseurl, user.get_token(self.plex.machineIdentifier)
                    )

                for library, videos in libraries.items():
                    library_other = None
                    if library_mapping:
                        if library in library_mapping.keys():
                            library_other = library_mapping[library]
                        elif library in library_mapping.values():
                            library_other = search_mapping(library_mapping, library)

                    # if library in plex library list
                    library_list = user_plex.library.sections()
                    if library.lower() not in [x.title.lower() for x in library_list]:
                        if library_other:
                            if library_other.lower() in [x.title.lower() for x in library_list]:
                                logger(f"Plex: Library {library} not found, but {library_other} found, using {library_other}", 1)
                                library = library_other
                            else:
                                logger(f"Plex: Library {library} or {library_other} not found in library list", 2)
                                continue
                        else:
                            logger(f"Plex: Library {library} not found in library list", 2)
                            continue

                    args.append(
                        [
                            update_user_watched,
                            user,
                            user_plex,
                            library,
                            videos,
                            dryrun,
                        ]
                    )

            future_thread_executor(args)
        except Exception as e:
            logger(f"Plex: Failed to update watched, Error: {e}", 2)
            raise Exception(e)
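The HostNameIgnoringAdapter defined above is what the new SSL_BYPASS option plugs into: login() mounts it on a requests session so certificate hostname checks are skipped while the connection stays encrypted. A rough usage sketch, assuming it is run from the repository root (so src.plex is importable) and using a placeholder URL and token:

import requests
from plexapi.server import PlexServer

from src.plex import HostNameIgnoringAdapter

session = requests.Session()
# Tolerate hostname mismatches on self-signed certificates, as login() does when ssl_bypass is set.
session.mount("https://", HostNameIgnoringAdapter())

plex = PlexServer("https://192.168.1.2:32400", "placeholder-token", session=session)
print(plex.friendlyName)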
@@ -1 +1 @@
pytest
78  test/test_main.py  Normal file
@@ -0,0 +1,78 @@
import sys
import os

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import setup_black_white_lists


def test_setup_black_white_lists():
    # Simple
    blacklist_library = "library1, library2"
    whitelist_library = "library1, library2"
    blacklist_library_type = "library_type1, library_type2"
    whitelist_library_type = "library_type1, library_type2"
    blacklist_users = "user1, user2"
    whitelist_users = "user1, user2"

    (
        results_blacklist_library,
        return_whitelist_library,
        return_blacklist_library_type,
        return_whitelist_library_type,
        return_blacklist_users,
        return_whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library, whitelist_library, blacklist_library_type,
        whitelist_library_type, blacklist_users, whitelist_users,
    )

    assert results_blacklist_library == ["library1", "library2"]
    assert return_whitelist_library == ["library1", "library2"]
    assert return_blacklist_library_type == ["library_type1", "library_type2"]
    assert return_whitelist_library_type == ["library_type1", "library_type2"]
    assert return_blacklist_users == ["user1", "user2"]
    assert return_whitelist_users == ["user1", "user2"]

    # Library Mapping and user mapping
    library_mapping = {"library1": "library3"}
    user_mapping = {"user1": "user3"}

    (
        results_blacklist_library,
        return_whitelist_library,
        return_blacklist_library_type,
        return_whitelist_library_type,
        return_blacklist_users,
        return_whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library, whitelist_library, blacklist_library_type,
        whitelist_library_type, blacklist_users, whitelist_users,
        library_mapping, user_mapping,
    )

    assert results_blacklist_library == ["library1", "library2", "library3"]
    assert return_whitelist_library == ["library1", "library2", "library3"]
    assert return_blacklist_library_type == ["library_type1", "library_type2"]
    assert return_whitelist_library_type == ["library_type1", "library_type2"]
    assert return_blacklist_users == ["user1", "user2", "user3"]
    assert return_whitelist_users == ["user1", "user2", "user3"]
@@ -1,47 +0,0 @@
import sys
import os

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import setup_black_white_lists


def test_setup_black_white_lists():
    # Simple
    blacklist_library = 'library1, library2'
    whitelist_library = 'library1, library2'
    blacklist_library_type = 'library_type1, library_type2'
    whitelist_library_type = 'library_type1, library_type2'
    blacklist_users = 'user1, user2'
    whitelist_users = 'user1, user2'

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users)

    assert results_blacklist_library == ['library1', 'library2']
    assert return_whitelist_library == ['library1', 'library2']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2']
    assert return_whitelist_users == ['user1', 'user2']

    # Library Mapping and user mapping
    library_mapping = { "library1": "library3" }
    user_mapping = { "user1": "user3" }

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users, library_mapping, user_mapping)

    assert results_blacklist_library == ['library1', 'library2', 'library3']
    assert return_whitelist_library == ['library1', 'library2', 'library3']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2', 'user3']
    assert return_whitelist_users == ['user1', 'user2', 'user3']
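Taken together, the added and removed hunks above pin down the observable behaviour of setup_black_white_lists: comma-separated strings become lists of names, and mapped names are appended when a mapping is supplied. The stand-in below (fake_setup_black_white_lists, handling a single list only) is not the project's implementation, just a sketch that satisfies the same assertions.

def fake_setup_black_white_lists(blacklist, mapping=None):
    # Split a comma-separated .env-style string into a list of names.
    names = [x.strip() for x in blacklist.split(",")]
    # When a mapping is given, also include the mapped name so both sides are filtered.
    if mapping:
        for original, mapped in mapping.items():
            if original in names:
                names.append(mapped)
    return names

assert fake_setup_black_white_lists("library1, library2") == ["library1", "library2"]
assert fake_setup_black_white_lists("library1, library2", {"library1": "library3"}) == ["library1", "library2", "library3"]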
@@ -1,176 +1,301 @@
import sys
import os

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import cleanup_watched

tv_shows_watched_list_1 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {"imdb": "tt0550489", "tmdb": "282843", "tvdb": "176357", "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",)},
            {"imdb": "tt0550487", "tmdb": "282861", "tvdb": "300385", "locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {"locations": ("Test S01E01.mkv",)},
            {"locations": ("Test S01E02.mkv",)},
        ]
    },
}

movies_watched_list_1 = [
    {"imdb": "tt2380307", "tmdb": "354912", "title": "Coco", "locations": ("Coco (2017) Remux-1080p.mkv",)},
    {"tmdbcollection": "448150", "imdb": "tt1431045", "tmdb": "293660", "title": "Deadpool", "locations": ("Deadpool (2016) Remux-1080p.mkv",)},
]

tv_shows_watched_list_2 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {"imdb": "tt0550487", "tmdb": "282861", "tvdb": "300385", "locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",)},
            {"imdb": "tt0550498", "tmdb": "282865", "tvdb": "300474", "locations": ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {"locations": ("Test S01E02.mkv",)},
            {"locations": ("Test S01E03.mkv",)},
        ]
    },
}

movies_watched_list_2 = [
    {"imdb": "tt2380307", "tmdb": "354912", "title": "Coco", "locations": ("Coco (2017) Remux-1080p.mkv",)},
    {"imdb": "tt0384793", "tmdb": "9788", "tvdb": "9103", "title": "Accepted", "locations": ("Accepted (2006) Remux-1080p.mkv",)},
]

# Test to see if objects get deleted all the way up to the root.
tv_shows_2_watched_list_1 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {"imdb": "tt0550489", "tmdb": "282843", "tvdb": "176357", "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",)},
        ]
    }
}

expected_tv_show_watched_list_1 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {"imdb": "tt0550489", "tmdb": "282843", "tvdb": "176357", "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E01.mkv",)}]
    },
}

expected_movie_watched_list_1 = [
    {"tmdbcollection": "448150", "imdb": "tt1431045", "tmdb": "293660", "title": "Deadpool", "locations": ("Deadpool (2016) Remux-1080p.mkv",)},
]

expected_tv_show_watched_list_2 = {
    frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
        "Season 1": [
            {"imdb": "tt0550498", "tmdb": "282865", "tvdb": "300474", "locations": ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E03.mkv",)}]
    },
}

expected_movie_watched_list_2 = [
    {"imdb": "tt0384793", "tmdb": "9788", "tvdb": "9103", "title": "Accepted", "locations": ("Accepted (2006) Remux-1080p.mkv",)},
]


def test_simple_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


def test_mapping_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user2": {
            "Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user2": {
            "Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    user_mapping = {"user1": "user2"}
    library_mapping = {"TV Shows": "Shows"}

    return_watched_list_1 = cleanup_watched(
        user_watched_list_1, user_watched_list_2,
        user_mapping=user_mapping, library_mapping=library_mapping,
    )
    return_watched_list_2 = cleanup_watched(
        user_watched_list_2, user_watched_list_1,
        user_mapping=user_mapping, library_mapping=library_mapping,
    )

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2