Compare commits
21 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 9afc00443c |  |
|  | 3ec177ea64 |  |
|  | b360c9fd0b |  |
|  | 1ed791b1ed |  |
|  | f19b1a3063 |  |
|  | 190a72bd3c |  |
|  | c848106ce7 |  |
|  | dd319271bd |  |
|  | 16879cc728 |  |
|  | 942ec3533f |  |
|  | 9f6edfc91a |  |
|  | 827ace2e97 |  |
|  | f6b57a1b4d |  |
|  | 88a7526721 |  |
|  | 1efb4d8543 |  |
|  | 7571e9a343 |  |
|  | 7640e9ee03 |  |
|  | 50ed3d6400 |  |
|  | c9a373851f |  |
|  | a3f3db8f4e |  |
|  | de619de923 |  |
**`.env.sample`** — 84 changed lines (`@@ -1,42 +1,42 @@`)

```
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
DRYRUN = "True"
## Additional logging information
DEBUG = "True"
## Debugging level, "info" is default, "debug" is more verbose
DEBUG_LEVEL = "info"
## How often to run the script in seconds
SLEEP_DURATION = "3600"
## Log file where all output will be written to
LOGFILE = "log.log"
## Map usernames between plex and jellyfin in the event that they are different, order does not matter
#USER_MAPPING = { "testuser2": "testuser3" }
## Map libraries between plex and jellyfin in the even that they are different, order does not matter
#LIBRARY_MAPPING = { "Shows": "TV Shows" }


## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
PLEX_BASEURL = "http://localhost:32400"
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
PLEX_TOKEN = "SuperSecretToken"
## If not using plex token then use username and password of the server admin along with the servername
#PLEX_USERNAME = ""
#PLEX_PASSWORD = ""
#PLEX_SERVERNAME = "Plex Server"


## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma seperated list for multiple servers
JELLYFIN_BASEURL = "http://localhost:8096"
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
JELLYFIN_TOKEN = "SuperSecretToken"


## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
#BLACKLIST_LIBRARY = ""
#WHITELIST_LIBRARY = ""
#BLACKLIST_LIBRARY_TYPE = ""
#WHITELIST_LIBRARY_TYPE = ""
#BLACKLIST_USERS = ""
WHITELIST_USERS = "testuser1,testuser2"
```
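As a rough sketch, not taken from this diff, of how these settings could be read with python-dotenv; treating the commented-out mapping values as JSON once uncommented is my assumption here, not something the sample states:

```python
import json
import os

from dotenv import load_dotenv  # python-dotenv, listed in the project's requirements

load_dotenv(override=True)

# Plain string settings come straight from the environment
plex_baseurl = os.getenv("PLEX_BASEURL", "http://localhost:32400")
jellyfin_baseurl = os.getenv("JELLYFIN_BASEURL", "http://localhost:8096")

# Assumption: the mapping variables are JSON objects like { "testuser2": "testuser3" }
raw_user_mapping = os.getenv("USER_MAPPING")
user_mapping = json.loads(raw_user_mapping) if raw_user_mapping else None

# Comma separated lists such as WHITELIST_USERS
whitelist_users = [
    u.strip() for u in os.getenv("WHITELIST_USERS", "").split(",") if u.strip()
]

print(plex_baseurl, jellyfin_baseurl, user_mapping, whitelist_users)
```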
**`.github/workflows/ci.yml`** (vendored) — 172 changed lines (`@@ -1,86 +1,86 @@`)

```yaml
name: CI
on:
  push:
    paths-ignore:
      - .gitignore
      - "*.md"
  pull_request:
    paths-ignore:
      - .gitignore
      - "*.md"

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2

      - name: "Install dependencies"
        run: pip install -r requirements.txt && pip install -r test/requirements.txt

      - name: "Run tests"
        run: pytest -vvv

  docker:
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Docker meta
        id: docker_meta
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
        if: "${{ env.DOCKER_USERNAME != '' }}"
        uses: docker/metadata-action@v4
        with:
          images: ${{ secrets.DOCKER_USERNAME }}/jellyplex-watched # list of Docker images to use as base name for tags
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=sha

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Login to DockerHub
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      - name: Build
        id: build
        if: "${{ steps.docker_meta.outcome == 'skipped' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: false
          tags: jellyplex-watched:action

      - name: Build Push
        id: build_push
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        uses: docker/build-push-action@v2
        with:
          context: .
          file: ./Dockerfile
          platforms: linux/amd64,linux/arm64
          push: true
          tags: ${{ steps.docker_meta.outputs.tags }}
          labels: ${{ steps.docker_meta.outputs.labels }}

      # Echo digest so users can validate their image
      - name: Image digest
        if: "${{ steps.docker_meta.outcome == 'success' }}"
        run: echo "${{ steps.build_push.outputs.digest }}"
```
**`.gitignore`** (vendored) — 264 changed lines (`@@ -1,132 +1,132 @@`)

```
.env
*.prof

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
```
**`.vscode/launch.json`** (vendored) — 32 changed lines (`@@ -1,16 +1,16 @@`)

```jsonc
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Main",
            "type": "python",
            "request": "launch",
            "program": "main.py",
            "console": "integratedTerminal",
            "justMyCode": true
        }
    ]
}
```
**`Dockerfile`** — 70 changed lines (`@@ -1,35 +1,35 @@`)

```dockerfile
FROM python:3-slim

ENV DRYRUN 'True'
ENV DEBUG 'True'
ENV DEBUG_LEVEL 'INFO'
ENV SLEEP_DURATION '3600'
ENV LOGFILE 'log.log'

ENV USER_MAPPING '{ "User Test": "User Test2" }'
ENV LIBRARY_MAPPING '{ "Shows Test": "TV Shows Test" }'

ENV PLEX_BASEURL 'http://localhost:32400'
ENV PLEX_TOKEN ''
ENV PLEX_USERNAME ''
ENV PLEX_PASSWORD ''
ENV PLEX_SERVERNAME ''

ENV JELLYFIN_BASEURL 'http://localhost:8096'
ENV JELLYFIN_TOKEN ''

ENV BLACKLIST_LIBRARY ''
ENV WHITELIST_LIBRARY ''
ENV BLACKLIST_LIBRARY_TYPE ''
ENV WHITELIST_LIBRARY_TYPE ''
ENV BLACKLIST_USERS ''
ENV WHITELIST_USERS ''

WORKDIR /app

COPY ./requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

CMD ["python", "-u", "main.py"]
```
**`README.md`** — 146 changed lines

````diff
@@ -1,73 +1,73 @@
 # JellyPlex-Watched

 [](https://www.codacy.com/gh/luigi311/JellyPlex-Watched/dashboard?utm_source=github.com&utm_medium=referral&utm_content=luigi311/JellyPlex-Watched&utm_campaign=Badge_Grade)

 Sync watched between jellyfin and plex

 ## Description

 Keep in sync all your users watched history between jellyfin and plex servers locally. This uses the imdb ids and any other matching id to find the correct episode/movie between the two. This is not perfect but it works for most cases. You can use this for as many servers as you want by enterying multiple options in the .env plex/jellyfin section seperated by commas.

 ## Configuration


 ## Installation

 ### Baremetal

 - Setup virtualenv of your choice

 - Install dependencies

 ```bash
 pip install -r requirements.txt
 ```

 - Create a .env file similar to .env.sample, uncomment whitelist and blacklist if needed, fill in baseurls and tokens

 - Run

 ```bash
 python main.py
 ```

 ### Docker

 - Build docker image

 ```bash
 docker build -t jellyplex-watched .
 ```

 - or use pre-built image

 ```bash
 docker pull luigi311/jellyplex-watched:latest
 ```

 #### With variables

 - Run

 ```bash
 docker run --rm -it -e PLEX_TOKEN='SuperSecretToken' luigi311/jellyplex-watched:latest
 ```

 #### With .env

-- Create a .env file similar to .env.sample and set the MNEMONIC variable to your seed phrase
+- Create a .env file similar to .env.sample and set the variables to match your setup

 - Run

 ```bash
 docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
 ```

 ## Contributing

 I am open to recieving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.

 ## License

 This is currently under the GNU General Public License v3.0.
````
**`main.py`** — 21 changed lines

```diff
@@ -1,10 +1,11 @@
 import sys

-if __name__ == '__main__':
+if __name__ == "__main__":
     # Check python version 3.6 or higher
     if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
         print("This script requires Python 3.6 or higher")
         sys.exit(1)

     from src.main import main
+
     main()
```
The Python dependency list gains one entry:

```diff
@@ -1,3 +1,4 @@
 plexapi
 requests
 python-dotenv
+aiohttp
```
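The new aiohttp dependency lines up with the rewrite of `src/jellyfin.py` further down, which moves from synchronous `requests` calls to `asyncio`/`aiohttp`. A minimal, self-contained sketch of that request pattern; the default URL and token below are placeholders, not values taken from a real server:

```python
import asyncio
import os

import aiohttp


async def fetch_users(baseurl: str, token: str) -> dict:
    """GET /Users from a Jellyfin server and return {name: id}, mirroring
    the async query pattern used in src/jellyfin.py."""
    headers = {"Accept": "application/json", "X-Emby-Token": token}
    async with aiohttp.ClientSession() as session:
        async with session.get(baseurl + "/Users", headers=headers) as response:
            users = await response.json()
    return {user["Name"]: user["Id"] for user in users}


if __name__ == "__main__":
    # Placeholder values; in the real project these come from the .env file.
    baseurl = os.getenv("JELLYFIN_BASEURL", "http://localhost:8096")
    token = os.getenv("JELLYFIN_TOKEN", "SuperSecretToken")
    print(asyncio.run(fetch_users(baseurl, token)))
```

Sharing one `ClientSession` across the queries, as the rewritten class does, keeps connection reuse while still allowing many requests to be gathered concurrently.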
**`src/functions.py`** — 339 changed lines (`@@ -1,150 +1,189 @@`)

The module is reformatted to black-style line lengths, `logger` gains a `[WARNING]` level (`log_type` 4) and its `[INFO]` branch now requires both `DEBUG` and `DEBUG_LEVEL == "info"` rather than either, `generate_library_guids_dict` loses its `generate_output` argument and instead wraps the show, episode, and movie passes in separate `try`/`except` blocks that log and skip on failure, and a new `combine_watched_dicts` helper merges per-library watched dictionaries. The resulting file:

```python
import os
from concurrent.futures import ThreadPoolExecutor
from dotenv import load_dotenv

load_dotenv(override=True)

logfile = os.getenv("LOGFILE", "log.log")


def logger(message: str, log_type=0):
    debug = str_to_bool(os.getenv("DEBUG", "True"))
    debug_level = os.getenv("DEBUG_LEVEL", "info").lower()

    output = str(message)
    if log_type == 0:
        pass
    elif log_type == 1 and (debug and debug_level == "info"):
        output = f"[INFO]: {output}"
    elif log_type == 2:
        output = f"[ERROR]: {output}"
    elif log_type == 3 and (debug and debug_level == "debug"):
        output = f"[DEBUG]: {output}"
    elif log_type == 4:
        output = f"[WARNING]: {output}"
    else:
        output = None

    if output is not None:
        print(output)
        file = open(logfile, "a", encoding="utf-8")
        file.write(output + "\n")


# Reimplementation of distutils.util.strtobool due to it being deprecated
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
def str_to_bool(value: any) -> bool:
    if not value:
        return False
    return str(value).lower() in ("y", "yes", "t", "true", "on", "1")


# Get mapped value
def search_mapping(dictionary: dict, key_value: str):
    if key_value in dictionary.keys():
        return dictionary[key_value]
    elif key_value.lower() in dictionary.keys():
        return dictionary[key_value.lower()]
    elif key_value in dictionary.values():
        return list(dictionary.keys())[list(dictionary.values()).index(key_value)]
    elif key_value.lower() in dictionary.values():
        return list(dictionary.keys())[
            list(dictionary.values()).index(key_value.lower())
        ]
    else:
        return None


def check_skip_logic(
    library_title,
    library_type,
    blacklist_library,
    whitelist_library,
    blacklist_library_type,
    whitelist_library_type,
    library_mapping,
):
    skip_reason = None

    if library_type.lower() in blacklist_library_type:
        skip_reason = "is blacklist_library_type"

    if library_title.lower() in [x.lower() for x in blacklist_library]:
        skip_reason = "is blacklist_library"

    library_other = None
    if library_mapping:
        library_other = search_mapping(library_mapping, library_title)
    if library_other:
        if library_other.lower() in [x.lower() for x in blacklist_library]:
            skip_reason = "is blacklist_library"

    if len(whitelist_library_type) > 0:
        if library_type.lower() not in whitelist_library_type:
            skip_reason = "is not whitelist_library_type"

    # if whitelist is not empty and library is not in whitelist
    if len(whitelist_library) > 0:
        if library_title.lower() not in [x.lower() for x in whitelist_library]:
            skip_reason = "is not whitelist_library"

        if library_other:
            if library_other.lower() not in [x.lower() for x in whitelist_library]:
                skip_reason = "is not whitelist_library"

    return skip_reason


def generate_library_guids_dict(user_list: dict):
    show_output_dict = {}
    episode_output_dict = {}
    movies_output_dict = {}

    try:
        show_output_keys = user_list.keys()
        show_output_keys = [dict(x) for x in list(show_output_keys)]
        for show_key in show_output_keys:
            for provider_key, provider_value in show_key.items():
                # Skip title
                if provider_key.lower() == "title":
                    continue
                if provider_key.lower() not in show_output_dict:
                    show_output_dict[provider_key.lower()] = []
                if provider_key.lower() == "locations":
                    for show_location in provider_value:
                        show_output_dict[provider_key.lower()].append(show_location)
                else:
                    show_output_dict[provider_key.lower()].append(
                        provider_value.lower()
                    )
    except Exception:
        logger("Generating show_output_dict failed, skipping", 1)

    try:
        for show in user_list:
            for season in user_list[show]:
                for episode in user_list[show][season]:
                    for episode_key, episode_value in episode.items():
                        if episode_key.lower() not in episode_output_dict:
                            episode_output_dict[episode_key.lower()] = []
                        if episode_key == "locations":
                            for episode_location in episode_value:
                                episode_output_dict[episode_key.lower()].append(
                                    episode_location
                                )
                        else:
                            episode_output_dict[episode_key.lower()].append(
                                episode_value.lower()
                            )
    except Exception:
        logger("Generating episode_output_dict failed, skipping", 1)

    try:
        for movie in user_list:
            for movie_key, movie_value in movie.items():
                if movie_key.lower() not in movies_output_dict:
                    movies_output_dict[movie_key.lower()] = []
                if movie_key == "locations":
                    for movie_location in movie_value:
                        movies_output_dict[movie_key.lower()].append(movie_location)
                else:
                    movies_output_dict[movie_key.lower()].append(movie_value.lower())
    except Exception:
        logger("Generating movies_output_dict failed, skipping", 1)

    return show_output_dict, episode_output_dict, movies_output_dict


def combine_watched_dicts(dicts: list):
    combined_dict = {}
    for single_dict in dicts:
        for key, value in single_dict.items():
            if key not in combined_dict:
                combined_dict[key] = {}
            for subkey, subvalue in value.items():
                combined_dict[key][subkey] = subvalue

    return combined_dict


def future_thread_executor(args: list, workers: int = -1):
    futures_list = []
    results = []

    if workers == -1:
        workers = min(32, os.cpu_count() * 1.25)

    with ThreadPoolExecutor(max_workers=workers) as executor:
        for arg in args:
            # * arg unpacks the list into actual arguments
            futures_list.append(executor.submit(*arg))

        for future in futures_list:
            try:
                result = future.result()
                results.append(result)
            except Exception as e:
                raise Exception(e)

    return results
```
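A short, hypothetical usage sketch of the helpers above; the library names, whitelist, and mapping are invented for illustration:

```python
# Assumes the repository root is on sys.path (run from the project directory).
from src.functions import check_skip_logic, search_mapping, str_to_bool

# search_mapping resolves a name in either direction of the mapping
library_mapping = {"Shows": "TV Shows"}
assert search_mapping(library_mapping, "Shows") == "TV Shows"
assert search_mapping(library_mapping, "TV Shows") == "Shows"

# str_to_bool mirrors the deprecated distutils.util.strtobool
assert str_to_bool("True") is True
assert str_to_bool("0") is False

# check_skip_logic returns a reason string when a library should be skipped,
# or None when it should be synced (no mapping involved in this example)
assert check_skip_logic("Movies", "Movie", [], ["Movies"], [], [], None) is None
assert (
    check_skip_logic("Anime", "Series", [], ["Movies"], [], [], None)
    == "is not whitelist_library"
)
```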
**`src/jellyfin.py`** — 965 changed lines (`@@ -1,319 +1,646 @@`)

The class is rewritten from synchronous `requests` calls to `asyncio`/`aiohttp`: `query` becomes a coroutine that takes an `aiohttp` session and can attach caller-supplied identifiers to its results; `get_users`, `get_user_watched`, `get_watched`, and `update_user_watched` all become coroutines; a new `get_users_watched` wrapper gathers the per-library lookups for a user with `asyncio.gather`; watched shows and seasons are pre-filtered on `UserData["PlayedPercentage"] > 0`; `update_user_watched` now logs the full mark list and calls `generate_library_guids_dict` without the old mode argument; and per-user results are merged through the new `combine_watched_dicts` helper from `src/functions.py`. The rewritten file begins:

```python
import asyncio, aiohttp
from src.functions import (
    logger,
    search_mapping,
    check_skip_logic,
    generate_library_guids_dict,
    combine_watched_dicts,
)


class Jellyfin:
    def __init__(self, baseurl, token):
        self.baseurl = baseurl
        self.token = token

        if not self.baseurl:
            raise Exception("Jellyfin baseurl not set")

        if not self.token:
            raise Exception("Jellyfin token not set")

        self.users = asyncio.run(self.get_users())

    async def query(self, query, query_type, session, identifiers=None):
        try:
            results = None
            headers = {"Accept": "application/json", "X-Emby-Token": self.token}
            authorization = (
                "MediaBrowser , "
                'Client="other", '
                'Device="script", '
                'DeviceId="script", '
                'Version="0.0.0"'
            )
            headers["X-Emby-Authorization"] = authorization

            if query_type == "get":
                async with session.get(
                    self.baseurl + query, headers=headers
                ) as response:
                    results = await response.json()

            elif query_type == "post":
                async with session.post(
                    self.baseurl + query, headers=headers
                ) as response:
                    results = await response.json()

            # append identifiers to results
            if identifiers:
                results["Identifiers"] = identifiers
            return results

        except Exception as e:
            logger(f"Jellyfin: Query failed {e}", 2)
            raise Exception(e)

    async def get_users(self):
        try:
            users = {}

            query_string = "/Users"
            async with aiohttp.ClientSession() as session:
                response = await self.query(query_string, "get", session)

            # If reponse is not empty
            if response:
                for user in response:
                    users[user["Name"]] = user["Id"]

            return users
        except Exception as e:
            logger(f"Jellyfin: Get users failed {e}", 2)
            raise Exception(e)
```
|
||||||
|
msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
|
||||||
|
if not dryrun:
|
||||||
|
logger(f"Marking {msg}", 0)
|
||||||
|
await self.query(
|
||||||
|
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||||
|
"post",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
# TV Shows
|
||||||
|
if videos_shows_ids and videos_episodes_ids:
|
||||||
|
jellyfin_search = await self.query(
|
||||||
|
f"/Users/{user_id}/Items"
|
||||||
|
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=false&ParentId={library_id}"
|
||||||
|
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,Path",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||||
|
|
||||||
|
for jellyfin_show in jellyfin_shows:
|
||||||
|
show_found = False
|
||||||
|
|
||||||
|
if "Path" in jellyfin_show:
|
||||||
|
if (
|
||||||
|
jellyfin_show["Path"].split("/")[-1]
|
||||||
|
in videos_shows_ids["locations"]
|
||||||
|
):
|
||||||
|
show_found = True
|
||||||
|
|
||||||
|
if not show_found:
|
||||||
|
for show_provider_source, show_provider_id in jellyfin_show[
|
||||||
|
"ProviderIds"
|
||||||
|
].items():
|
||||||
|
if show_provider_source.lower() in videos_shows_ids:
|
||||||
|
if (
|
||||||
|
show_provider_id.lower()
|
||||||
|
in videos_shows_ids[
|
||||||
|
show_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
show_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if show_found:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
jellyfin_show_id = jellyfin_show["Id"]
|
||||||
|
jellyfin_episodes = await self.query(
|
||||||
|
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||||
|
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||||
|
"get",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
|
||||||
|
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||||
|
episode_found = False
|
||||||
|
|
||||||
|
if "MediaSources" in jellyfin_episode:
|
||||||
|
for episode_location in jellyfin_episode[
|
||||||
|
"MediaSources"
|
||||||
|
]:
|
||||||
|
if (
|
||||||
|
episode_location["Path"].split("/")[-1]
|
||||||
|
in videos_episodes_ids["locations"]
|
||||||
|
):
|
||||||
|
episode_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not episode_found:
|
||||||
|
for (
|
||||||
|
episode_provider_source,
|
||||||
|
episode_provider_id,
|
||||||
|
) in jellyfin_episode["ProviderIds"].items():
|
||||||
|
if (
|
||||||
|
episode_provider_source.lower()
|
||||||
|
in videos_episodes_ids
|
||||||
|
):
|
||||||
|
if (
|
||||||
|
episode_provider_id.lower()
|
||||||
|
in videos_episodes_ids[
|
||||||
|
episode_provider_source.lower()
|
||||||
|
]
|
||||||
|
):
|
||||||
|
episode_found = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if episode_found:
|
||||||
|
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||||
|
msg = (
|
||||||
|
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['Name']}"
|
||||||
|
+ f" as watched for {user_name} in {library} for Jellyfin"
|
||||||
|
)
|
||||||
|
if not dryrun:
|
||||||
|
logger(f"Marked {msg}", 0)
|
||||||
|
await self.query(
|
||||||
|
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||||
|
"post",
|
||||||
|
session,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(f"Dryrun {msg}", 0)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (
|
||||||
|
not videos_movies_ids
|
||||||
|
and not videos_shows_ids
|
||||||
|
and not videos_episodes_ids
|
||||||
|
):
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Error updating watched for {user_name} in library {library}, {e}",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
raise Exception(e)
|
||||||
|
|
||||||
|
async def update_watched(
|
||||||
|
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||||
|
):
|
||||||
|
try:
|
||||||
|
tasks = []
|
||||||
|
async with aiohttp.ClientSession() as session:
|
||||||
|
for user, libraries in watched_list.items():
|
||||||
|
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
||||||
|
user_other = None
|
||||||
|
user_name = None
|
||||||
|
if user_mapping:
|
||||||
|
if user in user_mapping.keys():
|
||||||
|
user_other = user_mapping[user]
|
||||||
|
elif user in user_mapping.values():
|
||||||
|
user_other = search_mapping(user_mapping, user)
|
||||||
|
|
||||||
|
user_id = None
|
||||||
|
for key in self.users.keys():
|
||||||
|
if user.lower() == key.lower():
|
||||||
|
user_id = self.users[key]
|
||||||
|
user_name = key
|
||||||
|
break
|
||||||
|
elif user_other and user_other.lower() == key.lower():
|
||||||
|
user_id = self.users[key]
|
||||||
|
user_name = key
|
||||||
|
break
|
||||||
|
|
||||||
|
if not user_id:
|
||||||
|
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||||
|
continue
|
||||||
|
|
||||||
|
jellyfin_libraries = await self.query(
|
||||||
|
f"/Users/{user_id}/Views", "get", session
|
||||||
|
)
|
||||||
|
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||||
|
|
||||||
|
for library, videos in libraries.items():
|
||||||
|
library_other = None
|
||||||
|
if library_mapping:
|
||||||
|
if library in library_mapping.keys():
|
||||||
|
library_other = library_mapping[library]
|
||||||
|
elif library in library_mapping.values():
|
||||||
|
library_other = search_mapping(library_mapping, library)
|
||||||
|
|
||||||
|
if library.lower() not in [
|
||||||
|
x["Name"].lower() for x in jellyfin_libraries
|
||||||
|
]:
|
||||||
|
if library_other:
|
||||||
|
if library_other.lower() in [
|
||||||
|
x["Name"].lower() for x in jellyfin_libraries
|
||||||
|
]:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} not found, but {library_other} found, using {library_other}",
|
||||||
|
1,
|
||||||
|
)
|
||||||
|
library = library_other
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} or {library_other} not found in library list",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
logger(
|
||||||
|
f"Jellyfin: Library {library} not found in library list",
|
||||||
|
2,
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
library_id = None
|
||||||
|
for jellyfin_library in jellyfin_libraries:
|
||||||
|
if jellyfin_library["Name"] == library:
|
||||||
|
library_id = jellyfin_library["Id"]
|
||||||
|
continue
|
||||||
|
|
||||||
|
if library_id:
|
||||||
|
task = self.update_user_watched(
|
||||||
|
user_name, user_id, library, library_id, videos, dryrun
|
||||||
|
)
|
||||||
|
tasks.append(task)
|
||||||
|
|
||||||
|
await asyncio.gather(*tasks, return_exceptions=True)
|
||||||
|
except Exception as e:
|
||||||
|
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
||||||
|
raise Exception(e)
|
||||||
|
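Because every Jellyfin method above is now a coroutine, the caller has to drive them from an event loop. A minimal sketch of how they might be invoked on their own, assuming the class in src/jellyfin.py is named Jellyfin and that its constructor (which is not part of this hunk) takes a base URL and API token; the empty lists stand in for unused black/white lists:

import asyncio

from src.jellyfin import Jellyfin  # assumed class name

async def main():
    # Hypothetical constructor arguments; the real __init__ is defined earlier in the file.
    jf = Jellyfin(baseurl="http://localhost:8096", token="SuperSecretToken")

    # self.users is used as a {name: id} dict by update_watched(), so it can be passed straight in.
    watched = await jf.get_watched(jf.users, [], [], [], [], library_mapping=None)

    # dryrun=True only logs what would be marked, nothing is written back.
    await jf.update_watched(watched, dryrun=True)

asyncio.run(main())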
1103  src/main.py
File diff suppressed because it is too large.

735  src/plex.py
@@ -1,305 +1,430 @@
import re, requests

from plexapi.server import PlexServer
from plexapi.myplex import MyPlexAccount

from src.functions import (
    logger,
    search_mapping,
    check_skip_logic,
    generate_library_guids_dict,
    future_thread_executor,
)


def get_user_watched(user, user_plex, library):
    try:
        user_name = user.title.lower()
        user_watched = {}
        user_watched[user_name] = {}

        logger(
            f"Plex: Generating watched for {user_name} in library {library.title}",
            0,
        )

        if library.type == "movie":
            user_watched[user_name][library.title] = []

            library_videos = user_plex.library.section(library.title)
            for video in library_videos.search(unwatched=False):
                movie_guids = {}
                for guid in video.guids:
                    guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
                    guid_id = re.search(r"://(.*)", guid.id).group(1)
                    movie_guids[guid_source] = guid_id

                movie_guids["title"] = video.title
                movie_guids["locations"] = tuple(
                    [x.split("/")[-1] for x in video.locations]
                )

                user_watched[user_name][library.title].append(movie_guids)

        elif library.type == "show":
            user_watched[user_name][library.title] = {}

            library_videos = user_plex.library.section(library.title)
            for show in library_videos.search(unwatched=False):
                show_guids = {}
                for show_guid in show.guids:
                    # Extract after :// from guid.id
                    show_guid_source = (
                        re.search(r"(.*)://", show_guid.id).group(1).lower()
                    )
                    show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)
                    show_guids[show_guid_source] = show_guid_id

                show_guids["title"] = show.title
                show_guids["locations"] = tuple(
                    [x.split("/")[-1] for x in show.locations]
                )
                show_guids = frozenset(show_guids.items())

                for season in show.seasons():
                    episode_guids = []
                    for episode in season.episodes():
                        if episode.viewCount > 0:
                            episode_guids_temp = {}
                            for guid in episode.guids:
                                # Extract after :// from guid.id
                                guid_source = (
                                    re.search(r"(.*)://", guid.id).group(1).lower()
                                )
                                guid_id = re.search(r"://(.*)", guid.id).group(1)
                                episode_guids_temp[guid_source] = guid_id

                            episode_guids_temp["locations"] = tuple(
                                [x.split("/")[-1] for x in episode.locations]
                            )
                            episode_guids.append(episode_guids_temp)

                    if episode_guids:
                        # append show, season, episode
                        if show_guids not in user_watched[user_name][library.title]:
                            user_watched[user_name][library.title][show_guids] = {}
                        if (
                            season.title
                            not in user_watched[user_name][library.title][show_guids]
                        ):
                            user_watched[user_name][library.title][show_guids][
                                season.title
                            ] = {}
                        user_watched[user_name][library.title][show_guids][
                            season.title
                        ] = episode_guids

        return user_watched
    except Exception as e:
        logger(
            f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
            2,
        )
        raise Exception(e)


def update_user_watched(user, user_plex, library, videos, dryrun):
    try:
        logger(f"Plex: Updating watched for {user.title} in library {library}", 1)
        (
            videos_shows_ids,
            videos_episodes_ids,
            videos_movies_ids,
        ) = generate_library_guids_dict(videos)
        logger(
            f"Plex: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
            1,
        )

        library_videos = user_plex.library.section(library)
        if videos_movies_ids:
            for movies_search in library_videos.search(unwatched=True):
                movie_found = False
                for movie_location in movies_search.locations:
                    if movie_location.split("/")[-1] in videos_movies_ids["locations"]:
                        movie_found = True
                        break

                if not movie_found:
                    for movie_guid in movies_search.guids:
                        movie_guid_source = (
                            re.search(r"(.*)://", movie_guid.id).group(1).lower()
                        )
                        movie_guid_id = re.search(r"://(.*)", movie_guid.id).group(1)

                        # If movie provider source and movie provider id are in videos_movie_ids exactly, then the movie is in the list
                        if movie_guid_source in videos_movies_ids.keys():
                            if movie_guid_id in videos_movies_ids[movie_guid_source]:
                                movie_found = True
                                break

                if movie_found:
                    msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
                    if not dryrun:
                        logger(f"Marked {msg}", 0)
                        movies_search.markWatched()
                    else:
                        logger(f"Dryrun {msg}", 0)
                else:
                    logger(
                        f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}",
                        1,
                    )

        if videos_shows_ids and videos_episodes_ids:
            for show_search in library_videos.search(unwatched=True):
                show_found = False
                for show_location in show_search.locations:
                    if show_location.split("/")[-1] in videos_shows_ids["locations"]:
                        show_found = True
                        break

                if not show_found:
                    for show_guid in show_search.guids:
                        show_guid_source = (
                            re.search(r"(.*)://", show_guid.id).group(1).lower()
                        )
                        show_guid_id = re.search(r"://(.*)", show_guid.id).group(1)

                        # If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
                        if show_guid_source in videos_shows_ids.keys():
                            if show_guid_id in videos_shows_ids[show_guid_source]:
                                show_found = True
                                break

                if show_found:
                    for episode_search in show_search.episodes():
                        episode_found = False

                        for episode_location in episode_search.locations:
                            if (
                                episode_location.split("/")[-1]
                                in videos_episodes_ids["locations"]
                            ):
                                episode_found = True
                                break

                        if not episode_found:
                            for episode_guid in episode_search.guids:
                                episode_guid_source = (
                                    re.search(r"(.*)://", episode_guid.id)
                                    .group(1)
                                    .lower()
                                )
                                episode_guid_id = re.search(
                                    r"://(.*)", episode_guid.id
                                ).group(1)

                                # If episode provider source and episode provider id are in videos_episodes_ids exactly, then the episode is in the list
                                if episode_guid_source in videos_episodes_ids.keys():
                                    if (
                                        episode_guid_id
                                        in videos_episodes_ids[episode_guid_source]
                                    ):
                                        episode_found = True
                                        break

                        if episode_found:
                            msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
                            if not dryrun:
                                logger(f"Marked {msg}", 0)
                                episode_search.markWatched()
                            else:
                                logger(f"Dryrun {msg}", 0)
                        else:
                            logger(
                                f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}",
                                1,
                            )
                else:
                    logger(
                        f"Plex: Skipping show {show_search.title} as it is not in mark list for {user.title}",
                        1,
                    )

        if not videos_movies_ids and not videos_shows_ids and not videos_episodes_ids:
            logger(
                f"Plex: No videos to mark as watched for {user.title} in library {library}",
                1,
            )

    except Exception as e:
        logger(
            f"Plex: Failed to update watched for {user.title} in library {library}, Error: {e}",
            2,
        )
        raise Exception(e)


# class plex accept base url and token and username and password but default with none
class Plex:
    def __init__(
        self,
        baseurl=None,
        token=None,
        username=None,
        password=None,
        servername=None,
        ssl_bypass=False,
    ):
        self.baseurl = baseurl
        self.token = token
        self.username = username
        self.password = password
        self.servername = servername
        self.plex = self.login(ssl_bypass)
        self.admin_user = self.plex.myPlexAccount()
        self.users = self.get_users()

    def login(self, ssl_bypass=False):
        try:
            if self.baseurl and self.token:
                # Login via token
                if ssl_bypass:
                    session = requests.Session()
                    session.verify = False
                    plex = PlexServer(self.baseurl, self.token, session=session)
                else:
                    plex = PlexServer(self.baseurl, self.token)
            elif self.username and self.password and self.servername:
                # Login via plex account
                account = MyPlexAccount(self.username, self.password)
                plex = account.resource(self.servername).connect()
            else:
                raise Exception("No complete plex credentials provided")

            return plex
        except Exception as e:
            if self.username or self.password:
                msg = f"Failed to login via plex account {self.username}"
                logger(f"Plex: Failed to login, {msg}, Error: {e}", 2)
            else:
                logger(f"Plex: Failed to login, Error: {e}", 2)
            raise Exception(e)

    def get_users(self):
        try:
            users = self.plex.myPlexAccount().users()

            # append self to users
            users.append(self.plex.myPlexAccount())

            return users
        except Exception as e:
            logger(f"Plex: Failed to get users, Error: {e}", 2)
            raise Exception(e)

    def get_watched(
        self,
        users,
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        library_mapping,
    ):
        try:
            # Get all libraries
            users_watched = {}
            args = []

            for user in users:
                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    user_plex = PlexServer(
                        self.plex._baseurl, user.get_token(self.plex.machineIdentifier)
                    )

                libraries = user_plex.library.sections()

                for library in libraries:
                    library_title = library.title
                    library_type = library.type

                    skip_reason = check_skip_logic(
                        library_title,
                        library_type,
                        blacklist_library,
                        whitelist_library,
                        blacklist_library_type,
                        whitelist_library_type,
                        library_mapping,
                    )

                    if skip_reason:
                        logger(
                            f"Plex: Skipping library {library_title} {skip_reason}", 1
                        )
                        continue

                    args.append([get_user_watched, user, user_plex, library])

            for user_watched in future_thread_executor(args):
                for user, user_watched_temp in user_watched.items():
                    if user not in users_watched:
                        users_watched[user] = {}
                    users_watched[user].update(user_watched_temp)

            return users_watched
        except Exception as e:
            logger(f"Plex: Failed to get watched, Error: {e}", 2)
            raise Exception(e)

    def update_watched(
        self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
    ):
        try:
            args = []

            for user, libraries in watched_list.items():
                user_other = None
                # If type of user is dict
                if user_mapping:
                    if user in user_mapping.keys():
                        user_other = user_mapping[user]
                    elif user in user_mapping.values():
                        user_other = search_mapping(user_mapping, user)

                for index, value in enumerate(self.users):
                    if user.lower() == value.title.lower():
                        user = self.users[index]
                        break
                    elif user_other and user_other.lower() == value.title.lower():
                        user = self.users[index]
                        break

                if self.admin_user == user:
                    user_plex = self.plex
                else:
                    user_plex = PlexServer(
                        self.plex._baseurl, user.get_token(self.plex.machineIdentifier)
                    )

                for library, videos in libraries.items():
                    library_other = None
                    if library_mapping:
                        if library in library_mapping.keys():
                            library_other = library_mapping[library]
                        elif library in library_mapping.values():
                            library_other = search_mapping(library_mapping, library)

                    # if library in plex library list
                    library_list = user_plex.library.sections()
                    if library.lower() not in [x.title.lower() for x in library_list]:
                        if library_other:
                            if library_other.lower() in [
                                x.title.lower() for x in library_list
                            ]:
                                logger(
                                    f"Plex: Library {library} not found, but {library_other} found, using {library_other}",
                                    1,
                                )
                                library = library_other
                            else:
                                logger(
                                    f"Plex: Library {library} or {library_other} not found in library list",
                                    2,
                                )
                                continue
                        else:
                            logger(
                                f"Plex: Library {library} not found in library list", 2
                            )
                            continue

                    args.append(
                        [
                            update_user_watched,
                            user,
                            user_plex,
                            library,
                            videos,
                            dryrun,
                        ]
                    )

            future_thread_executor(args)
        except Exception as e:
            logger(f"Plex: Failed to update watched, Error: {e}", 2)
            raise Exception(e)
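The new ssl_bypass option works by handing PlexServer a requests.Session with certificate verification switched off. Against a self-signed server that makes urllib3 emit InsecureRequestWarning on every call; a small optional sketch of how that warning could be silenced alongside the bypass (the suppression is an assumption of mine, it is not part of this diff):

import requests
import urllib3

# Assumption: only silence the warning when verification is deliberately disabled.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

session = requests.Session()
session.verify = False  # same effect as ssl_bypass=True in Plex.login()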
@@ -1 +1 @@
pytest

78  test/test_main.py (new file)
@@ -0,0 +1,78 @@
import sys
import os

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import setup_black_white_lists


def test_setup_black_white_lists():
    # Simple
    blacklist_library = "library1, library2"
    whitelist_library = "library1, library2"
    blacklist_library_type = "library_type1, library_type2"
    whitelist_library_type = "library_type1, library_type2"
    blacklist_users = "user1, user2"
    whitelist_users = "user1, user2"

    (
        results_blacklist_library,
        return_whitelist_library,
        return_blacklist_library_type,
        return_whitelist_library_type,
        return_blacklist_users,
        return_whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
    )

    assert results_blacklist_library == ["library1", "library2"]
    assert return_whitelist_library == ["library1", "library2"]
    assert return_blacklist_library_type == ["library_type1", "library_type2"]
    assert return_whitelist_library_type == ["library_type1", "library_type2"]
    assert return_blacklist_users == ["user1", "user2"]
    assert return_whitelist_users == ["user1", "user2"]

    # Library Mapping and user mapping
    library_mapping = {"library1": "library3"}
    user_mapping = {"user1": "user3"}

    (
        results_blacklist_library,
        return_whitelist_library,
        return_blacklist_library_type,
        return_whitelist_library_type,
        return_blacklist_users,
        return_whitelist_users,
    ) = setup_black_white_lists(
        blacklist_library,
        whitelist_library,
        blacklist_library_type,
        whitelist_library_type,
        blacklist_users,
        whitelist_users,
        library_mapping,
        user_mapping,
    )

    assert results_blacklist_library == ["library1", "library2", "library3"]
    assert return_whitelist_library == ["library1", "library2", "library3"]
    assert return_blacklist_library_type == ["library_type1", "library_type2"]
    assert return_whitelist_library_type == ["library_type1", "library_type2"]
    assert return_blacklist_users == ["user1", "user2", "user3"]
    assert return_whitelist_users == ["user1", "user2", "user3"]

@@ -1,47 +0,0 @@
import sys
import os

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import setup_black_white_lists

def test_setup_black_white_lists():
    # Simple
    blacklist_library = 'library1, library2'
    whitelist_library = 'library1, library2'
    blacklist_library_type = 'library_type1, library_type2'
    whitelist_library_type = 'library_type1, library_type2'
    blacklist_users = 'user1, user2'
    whitelist_users = 'user1, user2'

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users)

    assert results_blacklist_library == ['library1', 'library2']
    assert return_whitelist_library == ['library1', 'library2']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2']
    assert return_whitelist_users == ['user1', 'user2']

    # Library Mapping and user mapping
    library_mapping = { "library1": "library3" }
    user_mapping = { "user1": "user3" }

    results_blacklist_library, return_whitelist_library, return_blacklist_library_type, return_whitelist_library_type, return_blacklist_users, return_whitelist_users = setup_black_white_lists(blacklist_library, whitelist_library, blacklist_library_type, whitelist_library_type, blacklist_users, whitelist_users, library_mapping, user_mapping)

    assert results_blacklist_library == ['library1', 'library2', 'library3']
    assert return_whitelist_library == ['library1', 'library2', 'library3']
    assert return_blacklist_library_type == ['library_type1', 'library_type2']
    assert return_whitelist_library_type == ['library_type1', 'library_type2']
    assert return_blacklist_users == ['user1', 'user2', 'user3']
    assert return_whitelist_users == ['user1', 'user2', 'user3']
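With pytest pinned in the requirements hunk above, the relocated test can be run on its own. A minimal way to do that from Python, for anyone who prefers it over the usual command line invocation of pytest on test/test_main.py:

# Run just this test file programmatically; exits with pytest's return code.
import pytest

raise SystemExit(pytest.main(["-q", "test/test_main.py"]))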
@@ -1,176 +1,301 @@
import sys
import os

# getting the name of the directory
# where the this file is present.
current = os.path.dirname(os.path.realpath(__file__))

# Getting the parent directory name
# where the current directory is present.
parent = os.path.dirname(current)

# adding the parent directory to
# the sys.path.
sys.path.append(parent)

from src.main import cleanup_watched

tv_shows_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {"imdb": "tt0550489", "tmdb": "282843", "tvdb": "176357", "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",)},
            {"imdb": "tt0550487", "tmdb": "282861", "tvdb": "300385", "locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {"locations": ("Test S01E01.mkv",)},
            {"locations": ("Test S01E02.mkv",)},
        ]
    },
}

movies_watched_list_1 = [
    {"imdb": "tt2380307", "tmdb": "354912", "title": "Coco", "locations": ("Coco (2017) Remux-1080p.mkv",)},
    {"tmdbcollection": "448150", "imdb": "tt1431045", "tmdb": "293660", "title": "Deadpool", "locations": ("Deadpool (2016) Remux-1080p.mkv",)},
]

tv_shows_watched_list_2 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {"imdb": "tt0550487", "tmdb": "282861", "tvdb": "300385", "locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",)},
            {"imdb": "tt0550498", "tmdb": "282865", "tvdb": "300474", "locations": ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [
            {"locations": ("Test S01E02.mkv",)},
            {"locations": ("Test S01E03.mkv",)},
        ]
    },
}

movies_watched_list_2 = [
    {"imdb": "tt2380307", "tmdb": "354912", "title": "Coco", "locations": ("Coco (2017) Remux-1080p.mkv",)},
    {"imdb": "tt0384793", "tmdb": "9788", "tvdb": "9103", "title": "Accepted", "locations": ("Accepted (2006) Remux-1080p.mkv",)},
]

# Test to see if objects get deleted all the way up to the root.
tv_shows_2_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {"imdb": "tt0550489", "tmdb": "282843", "tvdb": "176357", "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",)},
        ]
    }
}

expected_tv_show_watched_list_1 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {"imdb": "tt0550489", "tmdb": "282843", "tvdb": "176357", "locations": ("Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E01.mkv",)}]
    },
}

expected_movie_watched_list_1 = [
    {"tmdbcollection": "448150", "imdb": "tt1431045", "tmdb": "293660", "title": "Deadpool", "locations": ("Deadpool (2016) Remux-1080p.mkv",)},
]

expected_tv_show_watched_list_2 = {
    frozenset(
        {
            ("tvdb", "75710"),
            ("title", "Criminal Minds"),
            ("imdb", "tt0452046"),
            ("locations", ("Criminal Minds",)),
            ("tmdb", "4057"),
        }
    ): {
        "Season 1": [
            {"imdb": "tt0550498", "tmdb": "282865", "tvdb": "300474", "locations": ("Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",)},
        ]
    },
    frozenset({("title", "Test"), ("locations", ("Test",))}): {
        "Season 1": [{"locations": ("Test S01E03.mkv",)}]
    },
}

expected_movie_watched_list_2 = [
    {"imdb": "tt0384793", "tmdb": "9788", "tvdb": "9103", "title": "Accepted", "locations": ("Accepted (2006) Remux-1080p.mkv",)},
]


def test_simple_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    return_watched_list_1 = cleanup_watched(user_watched_list_1, user_watched_list_2)
    return_watched_list_2 = cleanup_watched(user_watched_list_2, user_watched_list_1)

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2


def test_mapping_cleanup_watched():
    user_watched_list_1 = {
        "user1": {
            "TV Shows": tv_shows_watched_list_1,
            "Movies": movies_watched_list_1,
            "Other Shows": tv_shows_2_watched_list_1,
        },
    }
    user_watched_list_2 = {
        "user2": {
            "Shows": tv_shows_watched_list_2,
            "Movies": movies_watched_list_2,
            "Other Shows": tv_shows_2_watched_list_1,
        }
    }

    expected_watched_list_1 = {
        "user1": {
            "TV Shows": expected_tv_show_watched_list_1,
            "Movies": expected_movie_watched_list_1,
        }
    }

    expected_watched_list_2 = {
        "user2": {
            "Shows": expected_tv_show_watched_list_2,
            "Movies": expected_movie_watched_list_2,
        }
    }

    user_mapping = {"user1": "user2"}
    library_mapping = {"TV Shows": "Shows"}

    return_watched_list_1 = cleanup_watched(
        user_watched_list_1,
        user_watched_list_2,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )
    return_watched_list_2 = cleanup_watched(
        user_watched_list_2,
        user_watched_list_1,
        user_mapping=user_mapping,
        library_mapping=library_mapping,
    )

    assert return_watched_list_1 == expected_watched_list_1
    assert return_watched_list_2 == expected_watched_list_2
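The fixtures above describe cleanup_watched as a kind of difference operation: anything one user has already watched on both sides is dropped, and entries that empty out all the way up to the library level (the "Other Shows" data) disappear entirely. A tiny, hedged illustration of that idea for the flat movie case only; this is not the project's cleanup_watched implementation, just the behaviour the assertions encode:

def remove_common_movies(movies_a, movies_b):
    # Keep only the movies from list A that do not also appear in list B,
    # mirroring how expected_movie_watched_list_1 keeps Deadpool but drops Coco.
    keys_b = [frozenset(m.items()) for m in movies_b]
    return [m for m in movies_a if frozenset(m.items()) not in keys_b]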