Compare commits
15 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 9afc00443c |  |
|  | 3ec177ea64 |  |
|  | b360c9fd0b |  |
|  | 1ed791b1ed |  |
|  | f19b1a3063 |  |
|  | 190a72bd3c |  |
|  | c848106ce7 |  |
|  | dd319271bd |  |
|  | 16879cc728 |  |
|  | 942ec3533f |  |
|  | 9f6edfc91a |  |
|  | 827ace2e97 |  |
|  | f6b57a1b4d |  |
|  | 88a7526721 |  |
|  | 1efb4d8543 |  |
**.env.sample** (84 changed lines, hunk `@@ -1,42 +1,42 @@`). Both sides of the compare read identically, so the file is shown once:

```env
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True"
## Additional logging information
DEBUG = "True"
## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info"
## How often to run the script in seconds
SLEEP_DURATION = "3600"
## Log file where all output will be written to
LOGFILE = "log.log"
## Map usernames between plex and jellyfin in the event that they are different, order does not matter
#USER_MAPPING = { "testuser2": "testuser3" }
## Map libraries between plex and jellyfin in the even that they are different, order does not matter
#LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
PLEX_TOKEN = "SuperSecretToken"
## If not using plex token then use username and password of the server admin along with the servername
#PLEX_USERNAME = ""
#PLEX_PASSWORD = ""
#PLEX_SERVERNAME = "Plex Server"


## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
JELLYFIN_TOKEN = "SuperSecretToken"


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = ""
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2"
```
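For orientation, here is a minimal sketch of how a file like this is consumed with python-dotenv (the package listed in the requirements hunk further down). The exact parsing of each value is illustrative; in this project, `src/functions.py` below reads `LOGFILE`, `DEBUG` and `DEBUG_LEVEL` the same way.

```python
# Minimal sketch: reading settings like the ones above with python-dotenv.
# The specific conversions (bool/float) are illustrative, not the project's exact code.
import os
from dotenv import load_dotenv

load_dotenv(override=True)  # values from .env win over inherited environment variables

dryrun = os.getenv("DRYRUN", "True").lower() in ("y", "yes", "t", "true", "on", "1")
sleep_duration = float(os.getenv("SLEEP_DURATION", "3600"))
plex_baseurl = os.getenv("PLEX_BASEURL", "http://localhost:32400")

print(dryrun, sleep_duration, plex_baseurl)
```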
**.github/workflows/ci.yml** (172 changed lines, hunk `@@ -1,86 +1,86 @@`). Both sides of the compare read identically, so the workflow is shown once:

```yaml
name: CI
on:
  push:
    paths-ignore:
      - .gitignore
      - "*.md"
  pull_request:
    paths-ignore:
      - .gitignore
      - "*.md"

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: "Install dependencies"
        run: pip install -r requirements.txt && pip install -r test/requirements.txt

      - name: "Run tests"
        run: pytest -vvv

  docker:
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Docker meta
        id: docker_meta
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        if: "${{ env.DOCKER_USERNAME != '' }}"
        uses: docker/metadata-action@v4
        with:
          images: ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched # list of Docker images to use as base name for tags
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Build
        id: build
        if: "${{ steps.docker_meta.outcome == 'skipped' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: false
          tags: jellyplex-watched:action

      - name: Build Push
        id: build_push
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

      # Echo digest so users can validate their image
      - name: Image digest
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        run: echo "${{ steps.build_push.outputs.digest }}"
```
**.gitignore** (264 changed lines, hunk `@@ -1,132 +1,132 @@`). Both sides of the compare read identically, so the file is shown once:

```gitignore
.env
*.prof

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
```
**.vscode/launch.json** (32 changed lines, hunk `@@ -1,16 +1,16 @@`). Both sides of the compare read identically, so the file is shown once:

```jsonc
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Main",
            "type": "python",
            "request": "launch",
            "program": "main.py",
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}
```
**Dockerfile** (70 changed lines, hunk `@@ -1,35 +1,35 @@`). Both sides of the compare read identically, so the file is shown once:

```dockerfile
FROM python:3-slim

ENV DRYRUN 'True'
ENV DEBUG 'True'
ENV DEBUG_LEVEL 'INFO'
ENV SLEEP_DURATION '3600'
ENV LOGFILE 'log.log'

ENV USER_MAPPING '{ "User Test": "User Test2" }'
ENV LIBRARY_MAPPING '{ "Shows Test": "TV Shows Test" }'

ENV PLEX_BASEURL 'http://localhost:32400'
ENV PLEX_TOKEN ''
ENV PLEX_USERNAME ''
ENV PLEX_PASSWORD ''
ENV PLEX_SERVERNAME ''

ENV JELLYFIN_BASEURL 'http://localhost:8096'
ENV JELLYFIN_TOKEN ''

ENV BLACKLIST_LIBRARY ''
ENV WHITELIST_LIBRARY ''
ENV BLACKLIST_LIBRARY_TYPE ''
ENV WHITELIST_LIBRARY_TYPE ''
ENV BLACKLIST_USERS ''
ENV WHITELIST_USERS ''

WORKDIR /app

COPY ./requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD ["python", "-u", "main.py"]
```
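One detail worth noting about these defaults: because `src/functions.py` (shown later in this compare) calls `load_dotenv(override=True)`, values from a mounted `.env` take precedence over the `ENV` defaults above. A small sketch of that behaviour, with a made-up `DRYRUN` value:

```python
# Sketch of why a mounted .env wins over the ENV defaults baked into the image.
# load_dotenv(override=True), as used in src/functions.py, replaces values that
# already exist in the process environment (e.g. the Dockerfile's ENV DRYRUN 'True').
import os
from dotenv import load_dotenv

os.environ["DRYRUN"] = "True"  # stands in for the Dockerfile ENV default
load_dotenv(override=True)     # a .env containing DRYRUN = "False" would win here
print(os.getenv("DRYRUN"))
```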
**README.md** (146 changed lines, hunk `@@ -1,73 +1,73 @@`). The only difference between the two sides is the "With .env" step, which previously read "Create a .env file similar to .env.sample and set the MNEMONIC variable to your seed phrase" and now reads "set the variables to match your setup". New version:

````markdown
# JellyPlex-Watched

[](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)

Sync watched between jellyfin and plex

## Description

Keep in sync all your users watched history between jellyfin and plex servers locally. This uses the imdb ids and any other matching id to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by enterying multiple options in the .env plex/jellyfin section seperated by commas.

## Configuration

## Installation

### Baremetal

- Setup virtualenv of your choice

- Install dependencies

```bash
pip install -r requirements.txt
```

- Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens

- Run

```bash
python main.py
```

### Docker

- Build docker image

```bash
docker build -t jellyplex-watched .
```

- or use pre-built image

```bash
docker pull luigi311/jellyplex-watched:latest
```

#### With variables

- Run

```bash
docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
```

#### With .env

- Create a .env file similar to .env.sample and set the variables to match your setup

- Run

```bash
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
```

## Contributing

I am open to recieving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.

## License

This is currently under the GNU General Public License v3.0.
````
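The description above mentions pointing the script at several servers by giving comma separated values in the `.env`. The compare does not show the code that splits those values, so the following is only a hedged sketch of the idea, not the project's actual parsing:

```python
# Illustrative only: how a comma separated PLEX_BASEURL/PLEX_TOKEN pair could be
# expanded into per-server values. Treat the split logic as an assumption.
import os

baseurls = [u.strip() for u in os.getenv("PLEX_BASEURL", "").split(",") if u.strip()]
tokens = [t.strip() for t in os.getenv("PLEX_TOKEN", "").split(",") if t.strip()]

servers = list(zip(baseurls, tokens))  # one (baseurl, token) pair per Plex server
print(servers)
```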
**main.py** (21 changed lines, hunk `@@ -1,10 +1,11 @@`). The only changes are the switch from `'__main__'` to `"__main__"` and a blank line added before the `main()` call. New version:

```python
import sys

if __name__ == "__main__":
    # Check python version 3.6 or higher
    if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
        print("This script requires Python 3.6 or higher")
        sys.exit(1)

    from src.main import main

    main()
```
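For clarity, the version guard compares tuples of the interpreter's major and minor version, for example:

```python
# What the guard above evaluates on a given interpreter, e.g. Python 3.10:
import sys

print(tuple(map(int, sys.version_info[:2])))             # (3, 10)
print((3, 6) <= tuple(map(int, sys.version_info[:2])))   # True, so the import proceeds
```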
Requirements hunk `@@ -1,3 +1,4 @@` (no filename header survives in the capture): `aiohttp` is added to the existing `plexapi`, `requests` and `python-dotenv`.

```text
plexapi
requests
python-dotenv
aiohttp
```
**src/functions.py** (344 changed lines, hunk `@@ -1,155 +1,189 @@`). Relative to the previous version, the new file is reformatted to black style; the INFO branch of `logger` now requires `debug and debug_level == "info"` instead of `debug or debug_level == "info"`; a `log_type == 4` branch producing `[WARNING]:` output is added; the three bare `except: pass` blocks in `generate_library_guids_dict` are replaced with `except Exception:` plus a logged "... failed, skipping" message; and a new `combine_watched_dicts` helper is introduced. New version:

```python
import os
from concurrent.futures import ThreadPoolExecutor
from dotenv import load_dotenv

load_dotenv(override=True)

logfile = os.getenv("LOGFILE", "log.log")


def logger(message: str, log_type=0):
    debug = str_to_bool(os.getenv("DEBUG", "True"))
    debug_level = os.getenv("DEBUG_LEVEL", "info").lower()

    output = str(message)
    if log_type == 0:
        pass
    elif log_type == 1 and (debug and debug_level == "info"):
        output = f"[INFO]: {output}"
    elif log_type == 2:
        output = f"[ERROR]: {output}"
    elif log_type == 3 and (debug and debug_level == "debug"):
        output = f"[DEBUG]: {output}"
    elif log_type == 4:
        output = f"[WARNING]: {output}"
    else:
        output = None

    if output is not None:
        print(output)
        file = open(logfile, "a", encoding="utf-8")
        file.write(output + "\n")


# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: any) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")


# Get mapped value
def search_mapping(dictionary: dict, key_value: str):
    if key_value in dictionary.keys():
        return dictionary[key_value]
    elif key_value.lower() in dictionary.keys():
        return dictionary[key_value.lower()]
    elif key_value in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value)]
    elif key_value.lower() in dictionary.values():
        return list(dictionary.keys())[
            list(dictionary.values()).index(key_value.lower())
        ]
    else:
        return None


def check_skip_logic(
    library_title,
    library_type,
    blacklist_library,
    whitelist_library,
    blacklist_library_type,
    whitelist_library_type,
    library_mapping,
):
    skip_reason = None

    if library_type.lower() in blacklist_library_type:
        skip_reason = "is blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        skip_reason = "is blacklist_library"

    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)
    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            skip_reason = "is blacklist_library"

    if len(whitelist_library_type) > 0:
        if library_type.lower() not in whitelist_library_type:
            skip_reason = "is not whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_title.lower() not in [x.lower() for x in whitelist_library]:
            skip_reason = "is not whitelist_library"

        if library_other:
            if library_other.lower() not in [x.lower() for x in whitelist_library]:
                skip_reason = "is not whitelist_library"

    return skip_reason


def generate_library_guids_dict(user_list: dict):
    show_output_dict = {}
    episode_output_dict = {}
    movies_output_dict = {}

    try:
        show_output_keys = user_list.keys()
        show_output_keys = [dict(x) for x in list(show_output_keys)]
        for show_key in show_output_keys:
            for provider_key, provider_value in show_key.items():
                # Skip title
                if provider_key.lower() == "title":
                    continue
                if provider_key.lower() not in show_output_dict:
                    show_output_dict[provider_key.lower()] = []
                if provider_key.lower() == "locations":
                    for show_location in provider_value:
                        show_output_dict[provider_key.lower()].append(show_location)
                else:
                    show_output_dict[provider_key.lower()].append(
                        provider_value.lower()
                    )
    except Exception:
        logger("Generating show_output_dict failed, skipping", 1)

    try:
        for show in user_list:
            for season in user_list[show]:
                for episode in user_list[show][season]:
                    for episode_key, episode_value in episode.items():
                        if episode_key.lower() not in episode_output_dict:
                            episode_output_dict[episode_key.lower()] = []
                        if episode_key == "locations":
                            for episode_location in episode_value:
                                episode_output_dict[episode_key.lower()].append(
                                    episode_location
                                )
                        else:
                            episode_output_dict[episode_key.lower()].append(
                                episode_value.lower()
                            )
    except Exception:
        logger("Generating episode_output_dict failed, skipping", 1)

    try:
        for movie in user_list:
            for movie_key, movie_value in movie.items():
                if movie_key.lower() not in movies_output_dict:
                    movies_output_dict[movie_key.lower()] = []
                if movie_key == "locations":
                    for movie_location in movie_value:
                        movies_output_dict[movie_key.lower()].append(movie_location)
                else:
                    movies_output_dict[movie_key.lower()].append(movie_value.lower())
    except Exception:
        logger("Generating movies_output_dict failed, skipping", 1)

    return show_output_dict, episode_output_dict, movies_output_dict


def combine_watched_dicts(dicts: list):
    combined_dict = {}
    for single_dict in dicts:
        for key, value in single_dict.items():
            if key not in combined_dict:
                combined_dict[key] = {}
            for subkey, subvalue in value.items():
                combined_dict[key][subkey] = subvalue

    return combined_dict


def future_thread_executor(args: list, workers: int = -1):
    futures_list = []
    results = []

    if workers == -1:
        workers = min(32, os.cpu_count() * 1.25)

    with ThreadPoolExecutor(max_workers=workers) as executor:
        for arg in args:
            # * arg unpacks the list into actual arguments
            futures_list.append(executor.submit(*arg))

        for future in futures_list:
            try:
                result = future.result()
                results.append(result)
            except Exception as e:
                raise Exception(e)

    return results
```
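A short, self-contained usage sketch of these helpers (the inputs are invented for illustration):

```python
# Quick usage sketch for the helpers above (inputs are made-up examples).
from src.functions import (
    str_to_bool,
    search_mapping,
    check_skip_logic,
    combine_watched_dicts,
)

print(str_to_bool("yes"))                                 # True
print(search_mapping({"Shows": "TV Shows"}, "TV Shows"))  # "Shows" (reverse lookup)

# A library that is not on the whitelist gets a skip reason back:
print(
    check_skip_logic(
        "Anime", "Series",
        blacklist_library=[], whitelist_library=["TV Shows"],
        blacklist_library_type=[], whitelist_library_type=[],
        library_mapping={"Shows": "TV Shows"},
    )
)  # "is not whitelist_library"

# Merging per-server watched dictionaries:
print(combine_watched_dicts([{"user1": {"Movies": []}}, {"user1": {"TV Shows": {}}}]))
```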
**src/jellyfin.py** (973 changed lines, hunk `@@ -1,327 +1,646 @@`). The synchronous, requests-based client is rewritten as an asynchronous aiohttp client. Previous version:

```python
import requests
from src.functions import logger, search_mapping, str_to_bool, check_skip_logic, generate_library_guids_dict, future_thread_executor

class Jellyfin():
    def __init__(self, baseurl, token):
        self.baseurl = baseurl
        self.token = token
        self.session = requests.Session()

        if not self.baseurl:
            raise Exception("Jellyfin baseurl not set")

        if not self.token:
            raise Exception("Jellyfin token not set")

        self.users = self.get_users()

    def query(self, query, query_type):
        try:
            response = None

            headers = {
                "Accept": "application/json",
                "X-Emby-Token": self.token
            }
            if query_type == "get":
                response = self.session.get(self.baseurl + query, headers=headers)

            elif query_type == "post":
                authorization = (
                    'MediaBrowser , '
                    'Client="other", '
                    'Device="script", '
                    'DeviceId="script", '
                    'Version="0.0.0"'
                )
                headers["X-Emby-Authorization"] = authorization
                response = self.session.post(self.baseurl + query, headers=headers)

            return response.json()

        except Exception as e:
            logger(f"Jellyfin: Query failed {e}", 2)
            raise Exception(e)

    def get_users(self):
        try:
            users = {}

            query = "/Users"
            response = self.query(query, "get")

            # If reponse is not empty
            if response:
                for user in response:
                    users[user["Name"]] = user["Id"]

            return users
        except Exception as e:
            logger(f"Jellyfin: Get users failed {e}", 2)
            raise Exception(e)

    def get_user_watched(self, user_name, user_id, library_type, library_id, library_title):
        try:
            user_name = user_name.lower()
            user_watched = {}
            user_watched[user_name] = {}

            logger(f"Jellyfin: Generating watched for {user_name} in library {library_title}", 0)
            # Movies
            if library_type == "Movie":
                user_watched[user_name][library_title] = []
                watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Filters=IsPlayed&Fields=ItemCounts,ProviderIds,MediaSources", "get")
                for movie in watched["Items"]:
                    if movie["UserData"]["Played"] == True:
                        movie_guids = {}
                        movie_guids["title"] = movie["Name"]
                        if movie["ProviderIds"]:
                            # Lowercase movie["ProviderIds"] keys
                            movie_guids = {k.lower(): v for k, v in movie["ProviderIds"].items()}
                        if movie["MediaSources"]:
                            movie_guids["locations"] = tuple([x["Path"].split("/")[-1] for x in movie["MediaSources"]])
                        user_watched[user_name][library_title].append(movie_guids)

            # TV Shows
            if library_type == "Episode":
                user_watched[user_name][library_title] = {}
                watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Fields=ItemCounts,ProviderIds,Path", "get")
                watched_shows = [x for x in watched["Items"] if x["Type"] == "Series"]

                for show in watched_shows:
                    show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
                    show_guids["title"] = show["Name"]
                    show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
                    show_guids = frozenset(show_guids.items())
                    seasons = self.query(f"/Shows/{show['Id']}/Seasons?userId={user_id}&Fields=ItemCounts,ProviderIds", "get")
                    if len(seasons["Items"]) > 0:
                        for season in seasons["Items"]:
                            episodes = self.query(f"/Shows/{show['Id']}/Episodes?seasonId={season['Id']}&userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources", "get")
                            if len(episodes["Items"]) > 0:
                                for episode in episodes["Items"]:
                                    if episode["UserData"]["Played"] == True:
                                        if episode["ProviderIds"] or episode["MediaSources"]:
                                            if show_guids not in user_watched[user_name][library_title]:
                                                user_watched[user_name][library_title][show_guids] = {}
                                            if season["Name"] not in user_watched[user_name][library_title][show_guids]:
                                                user_watched[user_name][library_title][show_guids][season["Name"]] = []

                                            # Lowercase episode["ProviderIds"] keys
                                            episode_guids = {}
                                            if episode["ProviderIds"]:
                                                episode_guids = {k.lower(): v for k, v in episode["ProviderIds"].items()}
                                            if episode["MediaSources"]:
                                                episode_guids["locations"] = tuple([x["Path"].split("/")[-1] for x in episode["MediaSources"]])
                                            user_watched[user_name][library_title][show_guids][season["Name"]].append(episode_guids)

            return user_watched
        except Exception as e:
            logger(f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}", 2)
            raise Exception(e)

    def get_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping=None):
        try:
            users_watched = {}
            args = []

            for user_name, user_id in users.items():
                # Get all libraries
                user_name = user_name.lower()

                libraries = self.query(f"/Users/{user_id}/Views", "get")["Items"]

                for library in libraries:
                    library_title = library["Name"]
                    library_id = library["Id"]
                    watched = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=true&ParentId={library_id}&Filters=IsPlayed&limit=1", "get")

                    if len(watched["Items"]) == 0:
                        logger(f"Jellyfin: No watched items found in library {library_title}", 1)
                        continue
                    else:
                        library_type = watched["Items"][0]["Type"]

                    skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)

                    if skip_reason:
                        logger(f"Jellyfin: Skipping library {library_title} {skip_reason}", 1)
                        continue

                    args.append([self.get_user_watched, user_name, user_id, library_type, library_id, library_title])

            for user_watched in future_thread_executor(args):
                for user, user_watched_temp in user_watched.items():
                    if user not in users_watched:
                        users_watched[user] = {}
                    users_watched[user].update(user_watched_temp)

            return users_watched
        except Exception as e:
            logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
            raise Exception(e)

    def update_user_watched(self, user_name, user_id, library, library_id, videos, dryrun):
        try:
            logger(f"Jellyfin: Updating watched for {user_name} in library {library}", 1)
            videos_shows_ids, videos_episodes_ids, videos_movies_ids = generate_library_guids_dict(videos)

            logger(f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}", 1)

            if videos_movies_ids:
                jellyfin_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources", "get")
                for jellyfin_video in jellyfin_search["Items"]:
                    movie_found = False

                    if "MediaSources" in jellyfin_video:
                        for movie_location in jellyfin_video["MediaSources"]:
                            if movie_location["Path"].split("/")[-1] in videos_movies_ids["locations"]:
                                movie_found = True
                                break

                    if not movie_found:
                        for movie_provider_source, movie_provider_id in jellyfin_video["ProviderIds"].items():
                            if movie_provider_source.lower() in videos_movies_ids:
                                if movie_provider_id.lower() in videos_movies_ids[movie_provider_source.lower()]:
                                    movie_found = True
                                    break

                    if movie_found:
                        jellyfin_video_id = jellyfin_video["Id"]
                        msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
                        if not dryrun:
                            logger(f"Marking {msg}", 0)
                            self.query(f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}", "post")
                        else:
                            logger(f"Dryrun {msg}", 0)
                    else:
                        logger(f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}", 1)

            # TV Shows
            if videos_shows_ids and videos_episodes_ids:
                jellyfin_search = self.query(f"/Users/{user_id}/Items?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}&isPlayed=false&Fields=ItemCounts,ProviderIds,Path", "get")
                jellyfin_shows = [x for x in jellyfin_search["Items"]]

                for jellyfin_show in jellyfin_shows:
                    show_found = False

                    if "Path" in jellyfin_show:
                        if jellyfin_show["Path"].split("/")[-1] in videos_shows_ids["locations"]:
                            show_found = True

                    if not show_found:
                        for show_provider_source, show_provider_id in jellyfin_show["ProviderIds"].items():
                            if show_provider_source.lower() in videos_shows_ids:
                                if show_provider_id.lower() in videos_shows_ids[show_provider_source.lower()]:
                                    show_found = True
                                    break

                    if show_found:
                        logger(f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}", 1)
                        jellyfin_show_id = jellyfin_show["Id"]
                        jellyfin_episodes = self.query(f"/Shows/{jellyfin_show_id}/Episodes?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources", "get")

                        for jellyfin_episode in jellyfin_episodes["Items"]:
                            episode_found = False

                            if "MediaSources" in jellyfin_episode:
                                for episode_location in jellyfin_episode["MediaSources"]:
                                    if episode_location["Path"].split("/")[-1] in videos_episodes_ids["locations"]:
                                        episode_found = True
                                        break

                            if not episode_found:
                                for episode_provider_source, episode_provider_id in jellyfin_episode["ProviderIds"].items():
                                    if episode_provider_source.lower() in videos_episodes_ids:
                                        if episode_provider_id.lower() in videos_episodes_ids[episode_provider_source.lower()]:
                                            episode_found = True
                                            break

                            if episode_found:
                                jellyfin_episode_id = jellyfin_episode["Id"]
                                msg = f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['Name']} as watched for {user_name} in {library} for Jellyfin"
                                if not dryrun:
                                    logger(f"Marked {msg}", 0)
                                    self.query(f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}", "post")
                                else:
                                    logger(f"Dryrun {msg}", 0)
                            else:
                                logger(f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}", 1)
                    else:
                        logger(f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}", 1)

            if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
                logger(f"Jellyfin: No videos to mark as watched for {user_name} in library {library}", 1)

        except Exception as e:
            logger(f"Jellyfin: Error updating watched for {user_name} in library {library}", 2)
            raise Exception(e)

    def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
        try:
            args = []
            for user, libraries in watched_list.items():
                logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
                user_other = None
                user_name = None
                if user_mapping:
                    if user in user_mapping.keys():
                        user_other = user_mapping[user]
                    elif user in user_mapping.values():
                        user_other = search_mapping(user_mapping, user)

                user_id = None
                for key in self.users.keys():
                    if user.lower() == key.lower():
                        user_id = self.users[key]
                        user_name = key
                        break
                    elif user_other and user_other.lower() == key.lower():
                        user_id = self.users[key]
                        user_name = key
                        break

                if not user_id:
                    logger(f"{user} {user_other} not found in Jellyfin", 2)
                    continue

                jellyfin_libraries = self.query(f"/Users/{user_id}/Views", "get")["Items"]

                for library, videos in libraries.items():
                    library_other = None
                    if library_mapping:
                        if library in library_mapping.keys():
                            library_other = library_mapping[library]
                        elif library in library_mapping.values():
                            library_other = search_mapping(library_mapping, library)

                    if library.lower() not in [x["Name"].lower() for x in jellyfin_libraries]:
                        if library_other:
                            if library_other.lower() in [x["Name"].lower() for x in jellyfin_libraries]:
                                logger(f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}", 1)
                                library = library_other
                            else:
                                logger(f"Jellyfin: Library {library} or {library_other} not found in library list", 2)
                                continue
                        else:
                            logger(f"Jellyfin: Library {library} not found in library list", 2)
                            continue

                    library_id = None
                    for jellyfin_library in jellyfin_libraries:
                        if jellyfin_library["Name"] == library:
                            library_id = jellyfin_library["Id"]
                            continue

                    if library_id:
                        args.append([self.update_user_watched, user_name, user_id, library, library_id, videos, dryrun])

            future_thread_executor(args)
        except Exception as e:
            logger(f"Jellyfin: Error updating watched", 2)
            raise Exception(e)
```
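Before the rewritten version below, here is a minimal sketch of how this synchronous client is driven; the URL, token and filter lists are placeholders, and in the project the list handed to `update_watched` would come from the Plex side rather than from Jellyfin itself.

```python
# Hypothetical driver for the synchronous client above; baseurl/token are placeholders.
from src.jellyfin import Jellyfin

jellyfin = Jellyfin("http://localhost:8096", "SuperSecretToken")
print(jellyfin.users)  # e.g. {"testuser1": "<user id>", ...}

watched = jellyfin.get_watched(
    jellyfin.users,
    blacklist_library=[],
    whitelist_library=[],
    blacklist_library_type=[],
    whitelist_library_type=[],
)
jellyfin.update_watched(watched, dryrun=True)  # dry run: log only, nothing is marked
```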
import asyncio, aiohttp
|
||||
from src.functions import (
|
||||
logger,
|
||||
search_mapping,
|
||||
check_skip_logic,
|
||||
generate_library_guids_dict,
|
||||
combine_watched_dicts,
|
||||
)
|
||||
|
||||
|
||||
class Jellyfin:
|
||||
def __init__(self, baseurl, token):
|
||||
self.baseurl = baseurl
|
||||
self.token = token
|
||||
|
||||
if not self.baseurl:
|
||||
raise Exception("Jellyfin baseurl not set")
|
||||
|
||||
if not self.token:
|
||||
raise Exception("Jellyfin token not set")
|
||||
|
||||
self.users = asyncio.run(self.get_users())
|
||||
|
||||
async def query(self, query, query_type, session, identifiers=None):
|
||||
try:
|
||||
results = None
|
||||
headers = {"Accept": "application/json", "X-Emby-Token": self.token}
|
||||
authorization = (
|
||||
"MediaBrowser , "
|
||||
'Client="other", '
|
||||
'Device="script", '
|
||||
'DeviceId="script", '
|
||||
'Version="0.0.0"'
|
||||
)
|
||||
headers["X-Emby-Authorization"] = authorization
|
||||
|
||||
if query_type == "get":
|
||||
async with session.get(
|
||||
self.baseurl + query, headers=headers
|
||||
) as response:
|
||||
results = await response.json()
|
||||
|
||||
elif query_type == "post":
|
||||
async with session.post(
|
||||
self.baseurl + query, headers=headers
|
||||
) as response:
|
||||
results = await response.json()
|
||||
|
||||
# append identifiers to results
|
||||
if identifiers:
|
||||
results["Identifiers"] = identifiers
|
||||
return results
|
||||
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Query failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_users(self):
|
||||
try:
|
||||
users = {}
|
||||
|
||||
query_string = "/Users"
|
||||
async with aiohttp.ClientSession() as session:
|
||||
response = await self.query(query_string, "get", session)
|
||||
|
||||
# If reponse is not empty
|
||||
if response:
|
||||
for user in response:
|
||||
users[user["Name"]] = user["Id"]
|
||||
|
||||
return users
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Get users failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_user_watched(
|
||||
self, user_name, user_id, library_type, library_id, library_title
|
||||
):
|
||||
try:
|
||||
user_name = user_name.lower()
|
||||
user_watched = {}
|
||||
user_watched[user_name] = {}
|
||||
|
||||
logger(
|
||||
f"Jellyfin: Generating watched for {user_name} in library {library_title}",
|
||||
0,
|
||||
)
|
||||
# Movies
|
||||
async with aiohttp.ClientSession() as session:
|
||||
if library_type == "Movie":
|
||||
user_watched[user_name][library_title] = []
|
||||
watched = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
for movie in watched["Items"]:
|
||||
if movie["UserData"]["Played"] is True:
|
||||
movie_guids = {}
|
||||
movie_guids["title"] = movie["Name"]
|
||||
if "ProviderIds" in movie:
|
||||
# Lowercase movie["ProviderIds"] keys
|
||||
movie_guids = {
|
||||
k.lower(): v
|
||||
for k, v in movie["ProviderIds"].items()
|
||||
}
|
||||
if "MediaSources" in movie:
|
||||
movie_guids["locations"] = tuple(
|
||||
[
|
||||
x["Path"].split("/")[-1]
|
||||
for x in movie["MediaSources"]
|
||||
]
|
||||
)
|
||||
user_watched[user_name][library_title].append(movie_guids)
|
||||
|
||||
# TV Shows
|
||||
if library_type == "Series":
|
||||
user_watched[user_name][library_title] = {}
|
||||
watched_shows = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&isPlaceHolder=false&Fields=ProviderIds,Path,RecursiveItemCount",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
watched_shows_filtered = []
|
||||
for show in watched_shows["Items"]:
|
||||
if "PlayedPercentage" in show["UserData"]:
|
||||
if show["UserData"]["PlayedPercentage"] > 0:
|
||||
watched_shows_filtered.append(show)
|
||||
seasons_tasks = []
|
||||
for show in watched_shows_filtered:
|
||||
show_guids = {
|
||||
k.lower(): v for k, v in show["ProviderIds"].items()
|
||||
}
|
||||
show_guids["title"] = show["Name"]
|
||||
show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
|
||||
show_guids = frozenset(show_guids.items())
|
||||
identifiers = {"show_guids": show_guids, "show_id": show["Id"]}
|
||||
task = asyncio.ensure_future(
|
||||
self.query(
|
||||
f"/Shows/{show['Id']}/Seasons"
|
||||
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,RecursiveItemCount",
|
||||
"get",
|
||||
session,
|
||||
frozenset(identifiers.items()),
|
||||
)
|
||||
)
|
||||
seasons_tasks.append(task)
|
||||
|
||||
seasons_watched = await asyncio.gather(*seasons_tasks)
|
||||
seasons_watched_filtered = []
|
||||
|
||||
for seasons in seasons_watched:
|
||||
seasons_watched_filtered_dict = {}
|
||||
seasons_watched_filtered_dict["Identifiers"] = seasons[
|
||||
"Identifiers"
|
||||
]
|
||||
seasons_watched_filtered_dict["Items"] = []
|
||||
for season in seasons["Items"]:
|
||||
if "PlayedPercentage" in season["UserData"]:
|
||||
if season["UserData"]["PlayedPercentage"] > 0:
|
||||
seasons_watched_filtered_dict["Items"].append(
|
||||
season
|
||||
)
|
||||
|
||||
if seasons_watched_filtered_dict["Items"]:
|
||||
seasons_watched_filtered.append(
|
||||
seasons_watched_filtered_dict
|
||||
)
|
||||
|
||||
episodes_tasks = []
|
||||
for seasons in seasons_watched_filtered:
|
||||
if len(seasons["Items"]) > 0:
|
||||
for season in seasons["Items"]:
|
||||
season_identifiers = dict(seasons["Identifiers"])
|
||||
season_identifiers["season_id"] = season["Id"]
|
||||
season_identifiers["season_name"] = season["Name"]
|
||||
task = asyncio.ensure_future(
|
||||
self.query(
|
||||
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
||||
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&isPlayed=true&Fields=ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
frozenset(season_identifiers.items()),
|
||||
)
|
||||
)
|
||||
episodes_tasks.append(task)
|
||||
|
||||
watched_episodes = await asyncio.gather(*episodes_tasks)
|
||||
for episodes in watched_episodes:
|
||||
if len(episodes["Items"]) > 0:
|
||||
for episode in episodes["Items"]:
|
||||
if episode["UserData"]["Played"] is True:
|
||||
if (
|
||||
"ProviderIds" in episode
|
||||
or "MediaSources" in episode
|
||||
):
|
||||
episode_identifiers = dict(
|
||||
episodes["Identifiers"]
|
||||
)
|
||||
show_guids = episode_identifiers["show_guids"]
|
||||
if (
|
||||
show_guids
|
||||
not in user_watched[user_name][
|
||||
library_title
|
||||
]
|
||||
):
|
||||
user_watched[user_name][library_title][
|
||||
show_guids
|
||||
] = {}
|
||||
if (
|
||||
episode_identifiers["season_name"]
|
||||
not in user_watched[user_name][
|
||||
library_title
|
||||
][show_guids]
|
||||
):
|
||||
user_watched[user_name][library_title][
|
||||
show_guids
|
||||
][episode_identifiers["season_name"]] = []
|
||||
|
||||
episode_guids = {}
|
||||
if "ProviderIds" in episode:
|
||||
episode_guids = {
|
||||
k.lower(): v
|
||||
for k, v in episode[
|
||||
"ProviderIds"
|
||||
].items()
|
||||
}
|
||||
if "MediaSources" in episode:
|
||||
episode_guids["locations"] = tuple(
|
||||
[
|
||||
x["Path"].split("/")[-1]
|
||||
for x in episode["MediaSources"]
|
||||
]
|
||||
)
|
||||
user_watched[user_name][library_title][
|
||||
show_guids
|
||||
][episode_identifiers["season_name"]].append(
|
||||
episode_guids
|
||||
)
|
||||
|
||||
return user_watched
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Jellyfin: Failed to get watched for {user_name} in library {library_title}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
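For reference, the nesting that get_user_watched builds above has the following shape (a sketch with placeholder values only, not data from a real library):

# Shape of the structure returned by get_user_watched (placeholder values):
# user_watched[user_name][library_title][frozenset(show_guids)][season_name] -> [episode_guids, ...]
# (movie libraries map to a flat list of guid dicts instead of the nested dict)
example_watched = {
    "user1": {
        "Shows": {
            frozenset({("title", "Some Show"), ("locations", ("Some Show",))}): {
                "Season 1": [
                    {"imdb": "tt0000001", "locations": ("Some Show S01E01.mkv",)},
                ],
            },
        },
    },
}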
async def get_users_watched(
|
||||
self,
|
||||
user_name,
|
||||
user_id,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
):
|
||||
try:
|
||||
# Get all libraries
|
||||
user_name = user_name.lower()
|
||||
tasks_watched = []
|
||||
|
||||
tasks_libraries = []
|
||||
async with aiohttp.ClientSession() as session:
|
||||
libraries = await self.query(f"/Users/{user_id}/Views", "get", session)
|
||||
for library in libraries["Items"]:
|
||||
library_id = library["Id"]
|
||||
library_title = library["Name"]
|
||||
identifiers = {
|
||||
"library_id": library_id,
|
||||
"library_title": library_title,
|
||||
}
|
||||
task = asyncio.ensure_future(
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&limit=1",
|
||||
"get",
|
||||
session,
|
||||
identifiers=identifiers,
|
||||
)
|
||||
)
|
||||
tasks_libraries.append(task)
|
||||
|
||||
libraries = await asyncio.gather(
|
||||
*tasks_libraries, return_exceptions=True
|
||||
)
|
||||
|
||||
for watched in libraries:
|
||||
if len(watched["Items"]) == 0:
|
||||
continue
|
||||
|
||||
library_id = watched["Identifiers"]["library_id"]
|
||||
library_title = watched["Identifiers"]["library_title"]
|
||||
library_type = watched["Items"][0]["Type"]
|
||||
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
if skip_reason:
|
||||
logger(
|
||||
f"Jellyfin: Skipping library {library_title} {skip_reason}",
|
||||
1,
|
||||
)
|
||||
continue
|
||||
|
||||
# Get watched for user
|
||||
task = asyncio.ensure_future(
|
||||
self.get_user_watched(
|
||||
user_name, user_id, library_type, library_id, library_title
|
||||
)
|
||||
)
|
||||
tasks_watched.append(task)
|
||||
|
||||
watched = await asyncio.gather(*tasks_watched, return_exceptions=True)
|
||||
return watched
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Failed to get users watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_watched(
|
||||
self,
|
||||
users,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping=None,
|
||||
):
|
||||
try:
|
||||
users_watched = {}
|
||||
watched = []
|
||||
|
||||
for user_name, user_id in users.items():
|
||||
watched.append(
|
||||
await self.get_users_watched(
|
||||
user_name,
|
||||
user_id,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
)
|
||||
|
||||
for user_watched in watched:
|
||||
user_watched_temp = combine_watched_dicts(user_watched)
|
||||
for user, user_watched_temp in user_watched_temp.items():
|
||||
if user not in users_watched:
|
||||
users_watched[user] = {}
|
||||
users_watched[user].update(user_watched_temp)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def update_user_watched(
|
||||
self, user_name, user_id, library, library_id, videos, dryrun
|
||||
):
|
||||
try:
|
||||
logger(
|
||||
f"Jellyfin: Updating watched for {user_name} in library {library}", 1
|
||||
)
|
||||
(
|
||||
videos_shows_ids,
|
||||
videos_episodes_ids,
|
||||
videos_movies_ids,
|
||||
) = generate_library_guids_dict(videos)
|
||||
|
||||
logger(
|
||||
f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||
1,
|
||||
)
|
||||
async with aiohttp.ClientSession() as session:
|
||||
if videos_movies_ids:
|
||||
jellyfin_search = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
for jellyfin_video in jellyfin_search["Items"]:
|
||||
movie_found = False
|
||||
|
||||
if "MediaSources" in jellyfin_video:
|
||||
for movie_location in jellyfin_video["MediaSources"]:
|
||||
if (
|
||||
movie_location["Path"].split("/")[-1]
|
||||
in videos_movies_ids["locations"]
|
||||
):
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if not movie_found:
|
||||
for (
|
||||
movie_provider_source,
|
||||
movie_provider_id,
|
||||
) in jellyfin_video["ProviderIds"].items():
|
||||
if movie_provider_source.lower() in videos_movies_ids:
|
||||
if (
|
||||
movie_provider_id.lower()
|
||||
in videos_movies_ids[
|
||||
movie_provider_source.lower()
|
||||
]
|
||||
):
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if movie_found:
|
||||
jellyfin_video_id = jellyfin_video["Id"]
|
||||
msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
|
||||
if not dryrun:
|
||||
logger(f"Marking {msg}", 0)
|
||||
await self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||
"post",
|
||||
session,
|
||||
)
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
)
|
||||
|
||||
# TV Shows
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
jellyfin_search = await self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,Path",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||
|
||||
for jellyfin_show in jellyfin_shows:
|
||||
show_found = False
|
||||
|
||||
if "Path" in jellyfin_show:
|
||||
if (
|
||||
jellyfin_show["Path"].split("/")[-1]
|
||||
in videos_shows_ids["locations"]
|
||||
):
|
||||
show_found = True
|
||||
|
||||
if not show_found:
|
||||
for show_provider_source, show_provider_id in jellyfin_show[
|
||||
"ProviderIds"
|
||||
].items():
|
||||
if show_provider_source.lower() in videos_shows_ids:
|
||||
if (
|
||||
show_provider_id.lower()
|
||||
in videos_shows_ids[
|
||||
show_provider_source.lower()
|
||||
]
|
||||
):
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if show_found:
|
||||
logger(
|
||||
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}",
|
||||
1,
|
||||
)
|
||||
jellyfin_show_id = jellyfin_show["Id"]
|
||||
jellyfin_episodes = await self.query(
|
||||
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
|
||||
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||
episode_found = False
|
||||
|
||||
if "MediaSources" in jellyfin_episode:
|
||||
for episode_location in jellyfin_episode[
|
||||
"MediaSources"
|
||||
]:
|
||||
if (
|
||||
episode_location["Path"].split("/")[-1]
|
||||
in videos_episodes_ids["locations"]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if not episode_found:
|
||||
for (
|
||||
episode_provider_source,
|
||||
episode_provider_id,
|
||||
) in jellyfin_episode["ProviderIds"].items():
|
||||
if (
|
||||
episode_provider_source.lower()
|
||||
in videos_episodes_ids
|
||||
):
|
||||
if (
|
||||
episode_provider_id.lower()
|
||||
in videos_episodes_ids[
|
||||
episode_provider_source.lower()
|
||||
]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if episode_found:
|
||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||
msg = (
|
||||
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['Name']}"
|
||||
+ f" as watched for {user_name} in {library} for Jellyfin"
|
||||
)
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
await self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||
"post",
|
||||
session,
|
||||
)
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
)
|
||||
|
||||
if (
|
||||
not videos_movies_ids
|
||||
and not videos_shows_ids
|
||||
and not videos_episodes_ids
|
||||
):
|
||||
logger(
|
||||
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||
1,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Jellyfin: Error updating watched for {user_name} in library {library}, {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
async def update_watched(
|
||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||
):
|
||||
try:
|
||||
tasks = []
|
||||
async with aiohttp.ClientSession() as session:
|
||||
for user, libraries in watched_list.items():
|
||||
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
||||
user_other = None
|
||||
user_name = None
|
||||
if user_mapping:
|
||||
if user in user_mapping.keys():
|
||||
user_other = user_mapping[user]
|
||||
elif user in user_mapping.values():
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
user_id = None
|
||||
for key in self.users.keys():
|
||||
if user.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
break
|
||||
elif user_other and user_other.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
break
|
||||
|
||||
if not user_id:
|
||||
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||
continue
|
||||
|
||||
jellyfin_libraries = await self.query(
|
||||
f"/Users/{user_id}/Views", "get", session
|
||||
)
|
||||
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||
|
||||
for library, videos in libraries.items():
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
if library in library_mapping.keys():
|
||||
library_other = library_mapping[library]
|
||||
elif library in library_mapping.values():
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
|
||||
if library.lower() not in [
|
||||
x["Name"].lower() for x in jellyfin_libraries
|
||||
]:
|
||||
if library_other:
|
||||
if library_other.lower() in [
|
||||
x["Name"].lower() for x in jellyfin_libraries
|
||||
]:
|
||||
logger(
|
||||
f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}",
|
||||
1,
|
||||
)
|
||||
library = library_other
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Library {library} or {library_other} not found in library list",
|
||||
2,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Library {library} not found in library list",
|
||||
2,
|
||||
)
|
||||
continue
|
||||
|
||||
library_id = None
|
||||
for jellyfin_library in jellyfin_libraries:
|
||||
if jellyfin_library["Name"] == library:
|
||||
library_id = jellyfin_library["Id"]
|
||||
continue
|
||||
|
||||
if library_id:
|
||||
task = self.update_user_watched(
|
||||
user_name, user_id, library, library_id, videos, dryrun
|
||||
)
|
||||
tasks.append(task)
|
||||
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
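Taken together, get_watched and update_watched are the only entry points a caller needs from this class. A minimal driver sketch follows; it assumes the Jellyfin class earlier in this file is constructed with a base URL and API token and populates self.users as a name-to-id dict, and the URL/token shown are placeholders, not values from this repository.

# Minimal driver sketch for the Jellyfin class above (assumed constructor arguments,
# placeholder URL/token; black/white lists left empty so nothing is filtered).
import asyncio

async def sync_jellyfin_only(jellyfin, dryrun=True):
    # Gather watched state for every known user.
    watched = await jellyfin.get_watched(
        users=jellyfin.users,
        blacklist_library=[],
        whitelist_library=[],
        blacklist_library_type=[],
        whitelist_library_type=[],
    )
    # Write the same structure back; with dryrun=True it only logs what it would mark.
    await jellyfin.update_watched(watched, dryrun=dryrun)
    return watched

# jellyfin = Jellyfin("http://jellyfin.example:8096", "placeholder-token")
# asyncio.run(sync_jellyfin_only(jellyfin))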
1103 src/main.py (file diff suppressed because it is too large)
742 src/plex.py
@@ -1,312 +1,430 @@
|
||||
import re, requests
|
||||
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.myplex import MyPlexAccount
|
||||
|
||||
from src.functions import logger, search_mapping, check_skip_logic, generate_library_guids_dict, future_thread_executor
|
||||
|
||||
|
||||
# Plex class: accepts either a base URL and token, or a username, password and server name (all default to None)
|
||||
class Plex:
|
||||
def __init__(self, baseurl=None, token=None, username=None, password=None, servername=None, ssl_bypass=False):
|
||||
self.baseurl = baseurl
|
||||
self.token = token
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.servername = servername
|
||||
self.plex = self.login()
|
||||
self.admin_user = self.plex.myPlexAccount()
|
||||
self.users = self.get_users()
|
||||
|
||||
def login(self):
|
||||
try:
|
||||
if self.baseurl and self.token:
|
||||
# Login via token
|
||||
plex = PlexServer(self.baseurl, self.token)
|
||||
elif self.username and self.password and self.servername:
|
||||
# Login via plex account
|
||||
account = MyPlexAccount(self.username, self.password)
|
||||
plex = account.resource(self.servername).connect()
|
||||
else:
|
||||
raise Exception("No complete plex credentials provided")
|
||||
|
||||
return plex
|
||||
except Exception as e:
|
||||
if self.username or self.password:
|
||||
msg = f"Failed to login via plex account {self.username}"
|
||||
logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
|
||||
else:
|
||||
logger(f"Plex: Failed to login, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
def get_users(self):
|
||||
try:
|
||||
users = self.plex.myPlexAccount().users()
|
||||
|
||||
# append self to users
|
||||
users.append(self.plex.myPlexAccount())
|
||||
|
||||
return users
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to get users, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_user_watched(self, user, user_plex, library):
|
||||
try:
|
||||
user_name = user.title.lower()
|
||||
user_watched = {}
|
||||
user_watched[user_name] = {}
|
||||
|
||||
logger(f"Plex: Generating watched for {user_name} in library {library.title}", 0)
|
||||
|
||||
if library.type == "movie":
|
||||
user_watched[user_name][library.title] = []
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
for video in library_videos.search(unwatched=False):
|
||||
movie_guids = {}
|
||||
for guid in video.guids:
|
||||
guid_source = re.search(r'(.*)://', guid.id).group(1).lower()
|
||||
guid_id = re.search(r'://(.*)', guid.id).group(1)
|
||||
movie_guids[guid_source] = guid_id
|
||||
|
||||
movie_guids["title"] = video.title
|
||||
movie_guids["locations"] = tuple([x.split("/")[-1] for x in video.locations])
|
||||
|
||||
user_watched[user_name][library.title].append(movie_guids)
|
||||
|
||||
elif library.type == "show":
|
||||
user_watched[user_name][library.title] = {}
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
for show in library_videos.search(unwatched=False):
|
||||
show_guids = {}
|
||||
for show_guid in show.guids:
|
||||
# Extract after :// from guid.id
|
||||
show_guid_source = re.search(r'(.*)://', show_guid.id).group(1).lower()
|
||||
show_guid_id = re.search(r'://(.*)', show_guid.id).group(1)
|
||||
show_guids[show_guid_source] = show_guid_id
|
||||
|
||||
show_guids["title"] = show.title
|
||||
show_guids["locations"] = tuple([x.split("/")[-1] for x in show.locations])
|
||||
show_guids = frozenset(show_guids.items())
|
||||
|
||||
for season in show.seasons():
|
||||
episode_guids = []
|
||||
for episode in season.episodes():
|
||||
if episode.viewCount > 0:
|
||||
episode_guids_temp = {}
|
||||
for guid in episode.guids:
|
||||
# Extract after :// from guid.id
|
||||
guid_source = re.search(r'(.*)://', guid.id).group(1).lower()
|
||||
guid_id = re.search(r'://(.*)', guid.id).group(1)
|
||||
episode_guids_temp[guid_source] = guid_id
|
||||
|
||||
episode_guids_temp["locations"] = tuple([x.split("/")[-1] for x in episode.locations])
|
||||
episode_guids.append(episode_guids_temp)
|
||||
|
||||
if episode_guids:
|
||||
# append show, season, episode
|
||||
if show_guids not in user_watched[user_name][library.title]:
|
||||
user_watched[user_name][library.title][show_guids] = {}
|
||||
if season.title not in user_watched[user_name][library.title][show_guids]:
|
||||
user_watched[user_name][library.title][show_guids][season.title] = {}
|
||||
user_watched[user_name][library.title][show_guids][season.title] = episode_guids
|
||||
|
||||
|
||||
return user_watched
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
def get_watched(self, users, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping):
|
||||
try:
|
||||
# Get all libraries
|
||||
users_watched = {}
|
||||
args = []
|
||||
|
||||
for user in users:
|
||||
if self.admin_user == user:
|
||||
user_plex = self.plex
|
||||
else:
|
||||
user_plex = PlexServer(self.plex._baseurl, user.get_token(self.plex.machineIdentifier))
|
||||
|
||||
libraries = user_plex.library.sections()
|
||||
|
||||
for library in libraries:
|
||||
library_title = library.title
|
||||
library_type = library.type
|
||||
|
||||
skip_reason = check_skip_logic(library_title, library_type, blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, library_mapping)
|
||||
|
||||
if skip_reason:
|
||||
logger(f"Plex: Skipping library {library_title} {skip_reason}", 1)
|
||||
continue
|
||||
|
||||
args.append([self.get_user_watched, user, user_plex, library])
|
||||
|
||||
for user_watched in future_thread_executor(args):
|
||||
for user, user_watched_temp in user_watched.items():
|
||||
if user not in users_watched:
|
||||
users_watched[user] = {}
|
||||
users_watched[user].update(user_watched_temp)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to get watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
def update_user_watched (self, user, user_plex, library, videos, dryrun):
|
||||
try:
|
||||
logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
|
||||
videos_shows_ids, videos_episodes_ids, videos_movies_ids = generate_library_guids_dict(videos)
|
||||
logger(f"Plex: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}", 1)
|
||||
|
||||
library_videos = user_plex.library.section(library)
|
||||
if videos_movies_ids:
|
||||
for movies_search in library_videos.search(unwatched=True):
|
||||
movie_found = False
|
||||
for movie_location in movies_search.locations:
|
||||
if movie_location.split("/")[-1] in videos_movies_ids["locations"]:
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if not movie_found:
|
||||
for movie_guid in movies_search.guids:
|
||||
movie_guid_source = re.search(r'(.*)://', movie_guid.id).group(1).lower()
|
||||
movie_guid_id = re.search(r'://(.*)', movie_guid.id).group(1)
|
||||
|
||||
# If movie provider source and movie provider id are in videos_movies_ids exactly, then the movie is in the list
|
||||
if movie_guid_source in videos_movies_ids.keys():
|
||||
if movie_guid_id in videos_movies_ids[movie_guid_source]:
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if movie_found:
|
||||
msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
movies_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}", 1)
|
||||
|
||||
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
for show_search in library_videos.search(unwatched=True):
|
||||
show_found = False
|
||||
for show_location in show_search.locations:
|
||||
if show_location.split("/")[-1] in videos_shows_ids["locations"]:
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if not show_found:
|
||||
for show_guid in show_search.guids:
|
||||
show_guid_source = re.search(r'(.*)://', show_guid.id).group(1).lower()
|
||||
show_guid_id = re.search(r'://(.*)', show_guid.id).group(1)
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if show_guid_source in videos_shows_ids.keys():
|
||||
if show_guid_id in videos_shows_ids[show_guid_source]:
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if show_found:
|
||||
for episode_search in show_search.episodes():
|
||||
episode_found = False
|
||||
|
||||
for episode_location in episode_search.locations:
|
||||
if episode_location.split("/")[-1] in videos_episodes_ids["locations"]:
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if not episode_found:
|
||||
for episode_guid in episode_search.guids:
|
||||
episode_guid_source = re.search(r'(.*)://', episode_guid.id).group(1).lower()
|
||||
episode_guid_id = re.search(r'://(.*)', episode_guid.id).group(1)
|
||||
|
||||
# If episode provider source and episode provider id are in videos_episodes_ids exactly, then the episode is in the list
|
||||
if episode_guid_source in videos_episodes_ids.keys():
|
||||
if episode_guid_id in videos_episodes_ids[episode_guid_source]:
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if episode_found:
|
||||
msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
episode_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}", 1)
|
||||
else:
|
||||
logger(f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}", 1)
|
||||
|
||||
if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
|
||||
logger(f"Jellyfin: No videos to mark as watched for {user.title} in library {library}", 1)
|
||||
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
def update_watched(self, watched_list, user_mapping=None, library_mapping=None, dryrun=False):
|
||||
try:
|
||||
args = []
|
||||
|
||||
for user, libraries in watched_list.items():
|
||||
user_other = None
|
||||
# Resolve the mapped user name if a user mapping is configured
|
||||
if user_mapping:
|
||||
if user in user_mapping.keys():
|
||||
user_other = user_mapping[user]
|
||||
elif user in user_mapping.values():
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
for index, value in enumerate(self.users):
|
||||
if user.lower() == value.title.lower():
|
||||
user = self.users[index]
|
||||
break
|
||||
elif user_other and user_other.lower() == value.title.lower():
|
||||
user = self.users[index]
|
||||
break
|
||||
|
||||
if self.admin_user == user:
|
||||
user_plex = self.plex
|
||||
else:
|
||||
user_plex = PlexServer(self.plex._baseurl, user.get_token(self.plex.machineIdentifier))
|
||||
|
||||
for library, videos in libraries.items():
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
if library in library_mapping.keys():
|
||||
library_other = library_mapping[library]
|
||||
elif library in library_mapping.values():
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
|
||||
# if library in plex library list
|
||||
library_list = user_plex.library.sections()
|
||||
if library.lower() not in [x.title.lower() for x in library_list]:
|
||||
if library_other:
|
||||
if library_other.lower() in [x.title.lower() for x in library_list]:
|
||||
logger(f"Plex: Library {library} not found, but {library_other} found, using {library_other}", 1)
|
||||
library = library_other
|
||||
else:
|
||||
logger(f"Plex: Library {library} or {library_other} not found in library list", 2)
|
||||
continue
|
||||
else:
|
||||
logger(f"Plex: Library {library} not found in library list", 2)
|
||||
continue
|
||||
|
||||
|
||||
args.append([self.update_user_watched, user, user_plex, library, videos, dryrun])
|
||||
|
||||
future_thread_executor(args)
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to update watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
import re, requests
|
||||
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.myplex import MyPlexAccount
|
||||
|
||||
from src.functions import (
|
||||
logger,
|
||||
search_mapping,
|
||||
check_skip_logic,
|
||||
generate_library_guids_dict,
|
||||
future_thread_executor,
|
||||
)
|
||||
|
||||
|
||||
def get_user_watched(user, user_plex, library):
|
||||
try:
|
||||
user_name = user.title.lower()
|
||||
user_watched = {}
|
||||
user_watched[user_name] = {}
|
||||
|
||||
logger(
|
||||
f"Plex: Generating watched for {user_name} in library {library.title}",
|
||||
0,
|
||||
)
|
||||
|
||||
if library.type == "movie":
|
||||
user_watched[user_name][library.title] = []
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
for video in library_videos.search(unwatched=False):
|
||||
movie_guids = {}
|
||||
for guid in video.guids:
|
||||
guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
|
||||
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||
movie_guids[guid_source] = guid_id
|
||||
|
||||
movie_guids["title"] = video.title
|
||||
movie_guids["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in video.locations]
|
||||
)
|
||||
|
||||
user_watched[user_name][library.title].append(movie_guids)
|
||||
|
||||
elif library.type == "show":
|
||||
user_watched[user_name][library.title] = {}
|
||||
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
for show in library_videos.search(unwatched=False):
|
||||
show_guids = {}
|
||||
for show_guid in show.guids:
|
||||
# Extract after :// from guid.id
|
||||
show_guid_source = (
|
||||
re.search(r"(.*)://", show_guid.id).group(1).lower()
|
||||
)
|
||||
show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)
|
||||
show_guids[show_guid_source] = show_guid_id
|
||||
|
||||
show_guids["title"] = show.title
|
||||
show_guids["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in show.locations]
|
||||
)
|
||||
show_guids = frozenset(show_guids.items())
|
||||
|
||||
for season in show.seasons():
|
||||
episode_guids = []
|
||||
for episode in season.episodes():
|
||||
if episode.viewCount > 0:
|
||||
episode_guids_temp = {}
|
||||
for guid in episode.guids:
|
||||
# Extract after :// from guid.id
|
||||
guid_source = (
|
||||
re.search(r"(.*)://", guid.id).group(1).lower()
|
||||
)
|
||||
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||
episode_guids_temp[guid_source] = guid_id
|
||||
|
||||
episode_guids_temp["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in episode.locations]
|
||||
)
|
||||
episode_guids.append(episode_guids_temp)
|
||||
|
||||
if episode_guids:
|
||||
# append show, season, episode
|
||||
if show_guids not in user_watched[user_name][library.title]:
|
||||
user_watched[user_name][library.title][show_guids] = {}
|
||||
if (
|
||||
season.title
|
||||
not in user_watched[user_name][library.title][show_guids]
|
||||
):
|
||||
user_watched[user_name][library.title][show_guids][
|
||||
season.title
|
||||
] = {}
|
||||
user_watched[user_name][library.title][show_guids][
|
||||
season.title
|
||||
] = episode_guids
|
||||
|
||||
return user_watched
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
try:
|
||||
logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
|
||||
(
|
||||
videos_shows_ids,
|
||||
videos_episodes_ids,
|
||||
videos_movies_ids,
|
||||
) = generate_library_guids_dict(videos)
|
||||
logger(
|
||||
f"Plex: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||
1,
|
||||
)
|
||||
|
||||
library_videos = user_plex.library.section(library)
|
||||
if videos_movies_ids:
|
||||
for movies_search in library_videos.search(unwatched=True):
|
||||
movie_found = False
|
||||
for movie_location in movies_search.locations:
|
||||
if movie_location.split("/")[-1] in videos_movies_ids["locations"]:
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if not movie_found:
|
||||
for movie_guid in movies_search.guids:
|
||||
movie_guid_source = (
|
||||
re.search(r"(.*)://", movie_guid.id).group(1).lower()
|
||||
)
|
||||
movie_guid_id = re.search(r"://(.*)", movie_guid.id).group(1)
|
||||
|
||||
# If movie provider source and movie provider id are in videos_movies_ids exactly, then the movie is in the list
|
||||
if movie_guid_source in videos_movies_ids.keys():
|
||||
if movie_guid_id in videos_movies_ids[movie_guid_source]:
|
||||
movie_found = True
|
||||
break
|
||||
|
||||
if movie_found:
|
||||
msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
movies_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}",
|
||||
1,
|
||||
)
|
||||
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
for show_search in library_videos.search(unwatched=True):
|
||||
show_found = False
|
||||
for show_location in show_search.locations:
|
||||
if show_location.split("/")[-1] in videos_shows_ids["locations"]:
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if not show_found:
|
||||
for show_guid in show_search.guids:
|
||||
show_guid_source = (
|
||||
re.search(r"(.*)://", show_guid.id).group(1).lower()
|
||||
)
|
||||
show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if show_guid_source in videos_shows_ids.keys():
|
||||
if show_guid_id in videos_shows_ids[show_guid_source]:
|
||||
show_found = True
|
||||
break
|
||||
|
||||
if show_found:
|
||||
for episode_search in show_search.episodes():
|
||||
episode_found = False
|
||||
|
||||
for episode_location in episode_search.locations:
|
||||
if (
|
||||
episode_location.split("/")[-1]
|
||||
in videos_episodes_ids["locations"]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if not episode_found:
|
||||
for episode_guid in episode_search.guids:
|
||||
episode_guid_source = (
|
||||
re.search(r"(.*)://", episode_guid.id)
|
||||
.group(1)
|
||||
.lower()
|
||||
)
|
||||
episode_guid_id = re.search(
|
||||
r"://(.*)", episode_guid.id
|
||||
).group(1)
|
||||
|
||||
# If episode provider source and episode provider id are in videos_episodes_ids exactly, then the episode is in the list
|
||||
if episode_guid_source in videos_episodes_ids.keys():
|
||||
if (
|
||||
episode_guid_id
|
||||
in videos_episodes_ids[episode_guid_source]
|
||||
):
|
||||
episode_found = True
|
||||
break
|
||||
|
||||
if episode_found:
|
||||
msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
episode_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}",
|
||||
1,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}",
|
||||
1,
|
||||
)
|
||||
|
||||
if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
|
||||
logger(
|
||||
f"Jellyfin: No videos to mark as watched for {user.title} in library {library}",
|
||||
1,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}",
|
||||
2,
|
||||
)
|
||||
raise Exception(e)
|
||||
|
||||
|
||||
# Plex class: accepts either a base URL and token, or a username, password and server name (all default to None)
|
||||
class Plex:
|
||||
def __init__(
|
||||
self,
|
||||
baseurl=None,
|
||||
token=None,
|
||||
username=None,
|
||||
password=None,
|
||||
servername=None,
|
||||
ssl_bypass=False,
|
||||
):
|
||||
self.baseurl = baseurl
|
||||
self.token = token
|
||||
self.username = username
|
||||
self.password = password
|
||||
self.servername = servername
|
||||
self.plex = self.login(ssl_bypass)
|
||||
self.admin_user = self.plex.myPlexAccount()
|
||||
self.users = self.get_users()
|
||||
|
||||
def login(self, ssl_bypass=False):
|
||||
try:
|
||||
if self.baseurl and self.token:
|
||||
# Login via token
|
||||
if ssl_bypass:
|
||||
session = requests.Session()
|
||||
session.verify = False
|
||||
plex = PlexServer(self.baseurl, self.token, session=session)
|
||||
else:
|
||||
plex = PlexServer(self.baseurl, self.token)
|
||||
elif self.username and self.password and self.servername:
|
||||
# Login via plex account
|
||||
account = MyPlexAccount(self.username, self.password)
|
||||
plex = account.resource(self.servername).connect()
|
||||
else:
|
||||
raise Exception("No complete plex credentials provided")
|
||||
|
||||
return plex
|
||||
except Exception as e:
|
||||
if self.username or self.password:
|
||||
msg = f"Failed to login via plex account {self.username}"
|
||||
logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
|
||||
else:
|
||||
logger(f"Plex: Failed to login, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_users(self):
|
||||
try:
|
||||
users = self.plex.myPlexAccount().users()
|
||||
|
||||
# append self to users
|
||||
users.append(self.plex.myPlexAccount())
|
||||
|
||||
return users
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to get users, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_watched(
|
||||
self,
|
||||
users,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
):
|
||||
try:
|
||||
# Get all libraries
|
||||
users_watched = {}
|
||||
args = []
|
||||
|
||||
for user in users:
|
||||
if self.admin_user == user:
|
||||
user_plex = self.plex
|
||||
else:
|
||||
user_plex = PlexServer(
|
||||
self.plex._baseurl, user.get_token(self.plex.machineIdentifier)
|
||||
)
|
||||
|
||||
libraries = user_plex.library.sections()
|
||||
|
||||
for library in libraries:
|
||||
library_title = library.title
|
||||
library_type = library.type
|
||||
|
||||
skip_reason = check_skip_logic(
|
||||
library_title,
|
||||
library_type,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
|
||||
if skip_reason:
|
||||
logger(
|
||||
f"Plex: Skipping library {library_title} {skip_reason}", 1
|
||||
)
|
||||
continue
|
||||
|
||||
args.append([get_user_watched, user, user_plex, library])
|
||||
|
||||
for user_watched in future_thread_executor(args):
|
||||
for user, user_watched_temp in user_watched.items():
|
||||
if user not in users_watched:
|
||||
users_watched[user] = {}
|
||||
users_watched[user].update(user_watched_temp)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to get watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def update_watched(
|
||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||
):
|
||||
try:
|
||||
args = []
|
||||
|
||||
for user, libraries in watched_list.items():
|
||||
user_other = None
|
||||
# Resolve the mapped user name if a user mapping is configured
|
||||
if user_mapping:
|
||||
if user in user_mapping.keys():
|
||||
user_other = user_mapping[user]
|
||||
elif user in user_mapping.values():
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
for index, value in enumerate(self.users):
|
||||
if user.lower() == value.title.lower():
|
||||
user = self.users[index]
|
||||
break
|
||||
elif user_other and user_other.lower() == value.title.lower():
|
||||
user = self.users[index]
|
||||
break
|
||||
|
||||
if self.admin_user == user:
|
||||
user_plex = self.plex
|
||||
else:
|
||||
user_plex = PlexServer(
|
||||
self.plex._baseurl, user.get_token(self.plex.machineIdentifier)
|
||||
)
|
||||
|
||||
for library, videos in libraries.items():
|
||||
library_other = None
|
||||
if library_mapping:
|
||||
if library in library_mapping.keys():
|
||||
library_other = library_mapping[library]
|
||||
elif library in library_mapping.values():
|
||||
library_other = search_mapping(library_mapping, library)
|
||||
|
||||
# if library in plex library list
|
||||
library_list = user_plex.library.sections()
|
||||
if library.lower() not in [x.title.lower() for x in library_list]:
|
||||
if library_other:
|
||||
if library_other.lower() in [
|
||||
x.title.lower() for x in library_list
|
||||
]:
|
||||
logger(
|
||||
f"Plex: Library {library} not found, but {library_other} found, using {library_other}",
|
||||
1,
|
||||
)
|
||||
library = library_other
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Library {library} or {library_other} not found in library list",
|
||||
2,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Library {library} not found in library list", 2
|
||||
)
|
||||
continue
|
||||
|
||||
args.append(
|
||||
[
|
||||
update_user_watched,
|
||||
user,
|
||||
user_plex,
|
||||
library,
|
||||
videos,
|
||||
dryrun,
|
||||
]
|
||||
)
|
||||
|
||||
future_thread_executor(args)
|
||||
except Exception as e:
|
||||
logger(f"Plex: Failed to update watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
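Before the unchanged requirements entry below, here is a minimal sketch of how the refactored Plex class and module-level helpers above are meant to be driven; the base URL and token are placeholders, the empty lists simply disable filtering, and this is not code from the repository.

# Minimal driver sketch for the Plex class above (placeholder credentials, no filtering).
plex = Plex(baseurl="http://plex.example:32400", token="placeholder-token")

watched = plex.get_watched(
    users=plex.users,
    blacklist_library=[],
    whitelist_library=[],
    blacklist_library_type=[],
    whitelist_library_type=[],
    library_mapping=None,
)

# Dry run: update_user_watched() is dispatched through future_thread_executor,
# but with dryrun=True each worker only logs what it would have marked.
plex.update_watched(watched, dryrun=True)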
@@ -1 +1 @@
pytest
78 test/test_main.py (new file)
@@ -0,0 +1,78 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.main import setup_black_white_lists
|
||||
|
||||
|
||||
def test_setup_black_white_lists():
|
||||
# Simple
|
||||
blacklist_library = "library1, library2"
|
||||
whitelist_library = "library1, library2"
|
||||
blacklist_library_type = "library_type1, library_type2"
|
||||
whitelist_library_type = "library_type1, library_type2"
|
||||
blacklist_users = "user1, user2"
|
||||
whitelist_users = "user1, user2"
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2"]
|
||||
assert return_whitelist_library == ["library1", "library2"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2"]
|
||||
assert return_whitelist_users == ["user1", "user2"]
|
||||
|
||||
# Library Mapping and user mapping
|
||||
library_mapping = {"library1": "library3"}
|
||||
user_mapping = {"user1": "user3"}
|
||||
|
||||
(
|
||||
results_blacklist_library,
|
||||
return_whitelist_library,
|
||||
return_blacklist_library_type,
|
||||
return_whitelist_library_type,
|
||||
return_blacklist_users,
|
||||
return_whitelist_users,
|
||||
) = setup_black_white_lists(
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
blacklist_library_type,
|
||||
whitelist_library_type,
|
||||
blacklist_users,
|
||||
whitelist_users,
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
|
||||
assert results_blacklist_library == ["library1", "library2", "library3"]
|
||||
assert return_whitelist_library == ["library1", "library2", "library3"]
|
||||
assert return_blacklist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_whitelist_library_type == ["library_type1", "library_type2"]
|
||||
assert return_blacklist_users == ["user1", "user2", "user3"]
|
||||
assert return_whitelist_users == ["user1", "user2", "user3"]
|
||||
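The new test above pins down the behaviour expected of setup_black_white_lists; a simplified sketch of that behaviour (not the project's actual implementation in src.main) is:

# Sketch of the behaviour the test above expects (simplified, not src.main's code):
# comma-separated strings become stripped lists, and any mapped library/user name is
# appended so a mapping cannot be used to slip past a black/white list.
def split_setting(value):
    return [item.strip() for item in value.split(",")] if value else []

blacklist_library = split_setting("library1, library2")
library_mapping = {"library1": "library3"}
for original, mapped in library_mapping.items():
    if original in blacklist_library and mapped not in blacklist_library:
        blacklist_library.append(mapped)
print(blacklist_library)  # ['library1', 'library2', 'library3']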
@@ -1,47 +0,0 @@
import sys
import os

# getting the name of the directory
# where this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import setup_black_white_lists


def test_setup_black_white_lists():
    # Simple
    blacklist_library = 'library1, library2'
    whitelist_library = 'library1, library2'
    blacklist_library_type = 'library_type1, library_type2'
    whitelist_library_type = 'library_type1, library_type2'
    blacklist_users = 'user1, user2'
    whitelist_users = 'user1, user2'

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users)

    assert results_blacklist_library == ['library1', 'library2']
    assert return_whitelist_library == ['library1', 'library2']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2']
    assert return_whitelist_users == ['user1', 'user2']

    # Library Mapping and user mapping
    library_mapping = { "library1": "library3" }
    user_mapping = { "user1": "user3" }

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users, library_mapping, user_mapping)

    assert results_blacklist_library == ['library1', 'library2', 'library3']
    assert return_whitelist_library == ['library1', 'library2', 'library3']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2', 'user3']
    assert return_whitelist_users == ['user1', 'user2', 'user3']
@@ -1,176 +1,301 @@
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.main import cleanup_watched
|
||||
|
||||
tv_shows_watched_list_1 = {
|
||||
frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
|
||||
"Season 1": [
|
||||
{'imdb': 'tt0550489', 'tmdb': '282843', 'tvdb': '176357', 'locations': ('Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv',)},
|
||||
{'imdb': 'tt0550487', 'tmdb': '282861', 'tvdb': '300385', 'locations': ('Criminal Minds S01E02 Compulsion WEBDL-720p.mkv',)}
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{'locations': ('Test S01E01.mkv',)},
|
||||
{'locations': ('Test S01E02.mkv',)}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
movies_watched_list_1 = [
|
||||
{"imdb":"tt2380307", "tmdb":"354912", 'title': 'Coco', 'locations': ('Coco (2017) Remux-1080p.mkv',)},
|
||||
{"tmdbcollection":"448150", "imdb":"tt1431045", "tmdb":"293660", 'title': 'Deadpool', 'locations': ('Deadpool (2016) Remux-1080p.mkv',)},
|
||||
]
|
||||
|
||||
tv_shows_watched_list_2 = {
|
||||
frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
|
||||
"Season 1": [
|
||||
{'imdb': 'tt0550487', 'tmdb': '282861', 'tvdb': '300385', 'locations': ('Criminal Minds S01E02 Compulsion WEBDL-720p.mkv',)},
|
||||
{'imdb': 'tt0550498', 'tmdb': '282865', 'tvdb': '300474', 'locations': ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)}
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{'locations': ('Test S01E02.mkv',)},
|
||||
{'locations': ('Test S01E03.mkv',)}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
movies_watched_list_2 = [
|
||||
{"imdb":"tt2380307", "tmdb":"354912", 'title': 'Coco', 'locations': ('Coco (2017) Remux-1080p.mkv',)},
|
||||
{'imdb': 'tt0384793', 'tmdb': '9788', 'tvdb': '9103', 'title': 'Accepted', 'locations': ('Accepted (2006) Remux-1080p.mkv',)}
|
||||
]
|
||||
|
||||
# Test to see if objects get deleted all the way up to the root.
|
||||
tv_shows_2_watched_list_1 = {
|
||||
frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
|
||||
"Season 1": [
|
||||
{'imdb': 'tt0550489', 'tmdb': '282843', 'tvdb': '176357', 'locations': ('Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv',)},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
expected_tv_show_watched_list_1 = {
|
||||
frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
|
||||
"Season 1": [
|
||||
{'imdb': 'tt0550489', 'tmdb': '282843', 'tvdb': '176357', 'locations': ('Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv',)}
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{'locations': ('Test S01E01.mkv',)}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
expected_movie_watched_list_1 = [
|
||||
{"tmdbcollection":"448150", "imdb":"tt1431045", "tmdb":"293660", 'title': 'Deadpool', 'locations': ('Deadpool (2016) Remux-1080p.mkv',)}
|
||||
]
|
||||
|
||||
expected_tv_show_watched_list_2 = {
|
||||
frozenset({("tvdb", "75710"), ("title", "Criminal Minds"), ("imdb", "tt0452046"), ("locations", ("Criminal Minds",)), ("tmdb", "4057")}): {
|
||||
"Season 1": [
|
||||
{'imdb': 'tt0550498', 'tmdb': '282865', 'tvdb': '300474', 'locations': ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)}
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{'locations': ('Test S01E03.mkv',)}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
expected_movie_watched_list_2 = [
|
||||
{'imdb': 'tt0384793', 'tmdb': '9788', 'tvdb': '9103', 'title': 'Accepted', 'locations': ('Accepted (2006) Remux-1080p.mkv',)}
|
||||
]
|
||||
|
||||
|
||||
def test_simple_cleanup_watched():
|
||||
user_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": tv_shows_watched_list_1,
|
||||
"Movies": movies_watched_list_1,
|
||||
"Other Shows": tv_shows_2_watched_list_1
|
||||
},
|
||||
}
|
||||
user_watched_list_2 = {
|
||||
"user1": {
|
||||
"TV Shows": tv_shows_watched_list_2,
|
||||
"Movies": movies_watched_list_2,
|
||||
"Other Shows": tv_shows_2_watched_list_1
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": expected_tv_show_watched_list_1
|
||||
, "Movies": expected_movie_watched_list_1
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_2 = {
|
||||
"user1": {
|
||||
"TV Shows": expected_tv_show_watched_list_2
|
||||
, "Movies": expected_movie_watched_list_2
|
||||
}
|
||||
}
|
||||
|
||||
return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
|
||||
return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)
|
||||
|
||||
assert return_watched_list_1 == expected_watched_list_1
|
||||
assert return_watched_list_2 == expected_watched_list_2
|
||||
|
||||
|
||||
def test_mapping_cleanup_watched():
|
||||
user_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": tv_shows_watched_list_1,
|
||||
"Movies": movies_watched_list_1,
|
||||
"Other Shows": tv_shows_2_watched_list_1
|
||||
},
|
||||
}
|
||||
user_watched_list_2 = {
|
||||
"user2": {
|
||||
"Shows": tv_shows_watched_list_2,
|
||||
"Movies": movies_watched_list_2,
|
||||
"Other Shows": tv_shows_2_watched_list_1
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": expected_tv_show_watched_list_1
|
||||
, "Movies": expected_movie_watched_list_1
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_2 = {
|
||||
"user2": {
|
||||
"Shows": expected_tv_show_watched_list_2
|
||||
, "Movies": expected_movie_watched_list_2
|
||||
}
|
||||
}
|
||||
|
||||
user_mapping = { "user1": "user2" }
|
||||
library_mapping = { "TV Shows": "Shows" }
|
||||
|
||||
return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2, user_mapping=user_mapping, library_mapping=library_mapping)
|
||||
return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1, user_mapping=user_mapping, library_mapping=library_mapping)
|
||||
|
||||
assert return_watched_list_1 == expected_watched_list_1
|
||||
assert return_watched_list_2 == expected_watched_list_2
|
||||
import sys
|
||||
import os
|
||||
|
||||
# getting the name of the directory
|
||||
# where this file is present.
|
||||
current = os.path.dirname(os.path.realpath(__file__))
|
||||
|
||||
# Getting the parent directory name
|
||||
# where the current directory is present.
|
||||
parent = os.path.dirname(current)
|
||||
|
||||
# adding the parent directory to
|
||||
# the sys.path.
|
||||
sys.path.append(parent)
|
||||
|
||||
from src.main import cleanup_watched
|
||||
|
||||
tv_shows_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
},
|
||||
{
|
||||
"imdb": "tt0550487",
|
||||
"tmdb": "282861",
|
||||
"tvdb": "300385",
|
||||
"locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{"locations": ("Test S01E01.mkv",)},
|
||||
{"locations": ("Test S01E02.mkv",)},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
movies_watched_list_1 = [
|
||||
{
|
||||
"imdb": "tt2380307",
|
||||
"tmdb": "354912",
|
||||
"title": "Coco",
|
||||
"locations": ("Coco (2017) Remux-1080p.mkv",),
|
||||
},
|
||||
{
|
||||
"tmdbcollection": "448150",
|
||||
"imdb": "tt1431045",
|
||||
"tmdb": "293660",
|
||||
"title": "Deadpool",
|
||||
"locations": ("Deadpool (2016) Remux-1080p.mkv",),
|
||||
},
|
||||
]
|
||||
|
||||
tv_shows_watched_list_2 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550487",
|
||||
"tmdb": "282861",
|
||||
"tvdb": "300385",
|
||||
"locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
|
||||
},
|
||||
{
|
||||
"imdb": "tt0550498",
|
||||
"tmdb": "282865",
|
||||
"tvdb": "300474",
|
||||
"locations": (
|
||||
"Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
|
||||
),
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
{"locations": ("Test S01E02.mkv",)},
|
||||
{"locations": ("Test S01E03.mkv",)},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
movies_watched_list_2 = [
|
||||
{
|
||||
"imdb": "tt2380307",
|
||||
"tmdb": "354912",
|
||||
"title": "Coco",
|
||||
"locations": ("Coco (2017) Remux-1080p.mkv",),
|
||||
},
|
||||
{
|
||||
"imdb": "tt0384793",
|
||||
"tmdb": "9788",
|
||||
"tvdb": "9103",
|
||||
"title": "Accepted",
|
||||
"locations": ("Accepted (2006) Remux-1080p.mkv",),
|
||||
},
|
||||
]
|
||||
|
||||
# Test to see if objects get deleted all the way up to the root.
|
||||
tv_shows_2_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
expected_tv_show_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
}
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [{"locations": ("Test S01E01.mkv",)}]
|
||||
},
|
||||
}
|
||||
|
||||
expected_movie_watched_list_1 = [
|
||||
{
|
||||
"tmdbcollection": "448150",
|
||||
"imdb": "tt1431045",
|
||||
"tmdb": "293660",
|
||||
"title": "Deadpool",
|
||||
"locations": ("Deadpool (2016) Remux-1080p.mkv",),
|
||||
}
|
||||
]
|
||||
|
||||
expected_tv_show_watched_list_2 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
{
|
||||
"imdb": "tt0550498",
|
||||
"tmdb": "282865",
|
||||
"tvdb": "300474",
|
||||
"locations": (
|
||||
"Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
|
||||
),
|
||||
}
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [{"locations": ("Test S01E03.mkv",)}]
|
||||
},
|
||||
}
|
||||
|
||||
expected_movie_watched_list_2 = [
|
||||
{
|
||||
"imdb": "tt0384793",
|
||||
"tmdb": "9788",
|
||||
"tvdb": "9103",
|
||||
"title": "Accepted",
|
||||
"locations": ("Accepted (2006) Remux-1080p.mkv",),
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_simple_cleanup_watched():
|
||||
user_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": tv_shows_watched_list_1,
|
||||
"Movies": movies_watched_list_1,
|
||||
"Other Shows": tv_shows_2_watched_list_1,
|
||||
},
|
||||
}
|
||||
user_watched_list_2 = {
|
||||
"user1": {
|
||||
"TV Shows": tv_shows_watched_list_2,
|
||||
"Movies": movies_watched_list_2,
|
||||
"Other Shows": tv_shows_2_watched_list_1,
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": expected_tv_show_watched_list_1,
|
||||
"Movies": expected_movie_watched_list_1,
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_2 = {
|
||||
"user1": {
|
||||
"TV Shows": expected_tv_show_watched_list_2,
|
||||
"Movies": expected_movie_watched_list_2,
|
||||
}
|
||||
}
|
||||
|
||||
return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
|
||||
return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)
|
||||
|
||||
assert return_watched_list_1 == expected_watched_list_1
|
||||
assert return_watched_list_2 == expected_watched_list_2
|
||||
|
||||
|
||||
def test_mapping_cleanup_watched():
|
||||
user_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": tv_shows_watched_list_1,
|
||||
"Movies": movies_watched_list_1,
|
||||
"Other Shows": tv_shows_2_watched_list_1,
|
||||
},
|
||||
}
|
||||
user_watched_list_2 = {
|
||||
"user2": {
|
||||
"Shows": tv_shows_watched_list_2,
|
||||
"Movies": movies_watched_list_2,
|
||||
"Other Shows": tv_shows_2_watched_list_1,
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_1 = {
|
||||
"user1": {
|
||||
"TV Shows": expected_tv_show_watched_list_1,
|
||||
"Movies": expected_movie_watched_list_1,
|
||||
}
|
||||
}
|
||||
|
||||
expected_watched_list_2 = {
|
||||
"user2": {
|
||||
"Shows": expected_tv_show_watched_list_2,
|
||||
"Movies": expected_movie_watched_list_2,
|
||||
}
|
||||
}
|
||||
|
||||
user_mapping = {"user1": "user2"}
|
||||
library_mapping = {"TV Shows": "Shows"}
|
||||
|
||||
return_watched_list_1 = cleanup_watched(
|
||||
user_watched_list_1,
|
||||
user_watched_list_2,
|
||||
user_mapping=user_mapping,
|
||||
library_mapping=library_mapping,
|
||||
)
|
||||
return_watched_list_2 = cleanup_watched(
|
||||
user_watched_list_2,
|
||||
user_watched_list_1,
|
||||
user_mapping=user_mapping,
|
||||
library_mapping=library_mapping,
|
||||
)
|
||||
|
||||
assert return_watched_list_1 == expected_watched_list_1
|
||||
assert return_watched_list_2 == expected_watched_list_2
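Both tests above exercise the same idea: cleanup_watched(list_a, list_b) keeps only what is watched in A but not in B, matching entries on shared provider ids or file locations and pruning emptied seasons and shows. A simplified, movies-only sketch of that matching rule (not the project's implementation) is:

# Movies-only sketch of the difference rule the tests above expect
# (the real cleanup_watched also walks shows/seasons and applies user/library mappings).
def diff_movies(movies_a, movies_b):
    def overlaps(a, b):
        # Shared provider ids (imdb/tmdb/tvdb/...) with equal values count as a match...
        provider_keys = (set(a) & set(b)) - {"title", "locations"}
        if any(a[key] == b[key] for key in provider_keys):
            return True
        # ...and so does any shared file name in "locations".
        return bool(set(a.get("locations", ())) & set(b.get("locations", ())))

    return [a for a in movies_a if not any(overlaps(a, b) for b in movies_b)]

# diff_movies(movies_watched_list_1, movies_watched_list_2) == expected_movie_watched_list_1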