Compare commits
155 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b3175305bd | ||
|
|
5b1933cb08 | ||
|
|
ae71ca0940 | ||
|
|
9b38729b95 | ||
|
|
402c286742 | ||
|
|
dcd4ac1d36 | ||
|
|
e6fbf746d8 | ||
|
|
803d248cb8 | ||
|
|
713be6970c | ||
|
|
62509f16db | ||
|
|
84899aef50 | ||
|
|
86b30e1887 | ||
|
|
033ef76cfe | ||
|
|
815596379c | ||
|
|
bc5e8bc65d | ||
|
|
b32de7259b | ||
|
|
29cb0cebd5 | ||
|
|
6744ebcb5b | ||
|
|
c6b026a82d | ||
|
|
cc706938ce | ||
|
|
84b98db36b | ||
|
|
01ad15e2bd | ||
|
|
54adf0e56f | ||
|
|
025e40b098 | ||
|
|
4534854001 | ||
|
|
362d54b471 | ||
|
|
fa533ff65e | ||
|
|
96fe367562 | ||
|
|
9566ffa384 | ||
|
|
f5835e1e72 | ||
|
|
fe65716706 | ||
|
|
873735900f | ||
|
|
28c166146e | ||
|
|
c6affc3108 | ||
|
|
59b49fd0df | ||
|
|
6ec003f899 | ||
|
|
95f2a9ad30 | ||
|
|
7317e8533d | ||
|
|
f80c20d70c | ||
|
|
01fc13c3e0 | ||
|
|
1edfecae42 | ||
|
|
9dab9a4632 | ||
|
|
98a824bfdc | ||
|
|
8fa9351ef1 | ||
|
|
64b2197844 | ||
|
|
26f1f80be7 | ||
|
|
2e4c2a6817 | ||
|
|
9498335e22 | ||
|
|
26f40110d0 | ||
|
|
9375d482b0 | ||
|
|
de9180a124 | ||
|
|
ba480d2cb7 | ||
|
|
5014748ee1 | ||
|
|
4e25ae5539 | ||
|
|
a2b802a5de | ||
|
|
9739b27718 | ||
|
|
bdf6476689 | ||
|
|
b8b627be1a | ||
|
|
03cad668aa | ||
|
|
2e0ec9aa38 | ||
|
|
4b02aae889 | ||
|
|
c91ba0b1b3 | ||
|
|
6b7f8b04e6 | ||
|
|
5472baab51 | ||
|
|
d5b6859bf8 | ||
|
|
8e23847c79 | ||
|
|
0c1579bae7 | ||
|
|
3dc50fff95 | ||
|
|
b8273f50c2 | ||
|
|
dbea28e9c6 | ||
|
|
a1b11ab039 | ||
|
|
1841b0dea6 | ||
|
|
b311bf2770 | ||
|
|
df13cef760 | ||
|
|
76ac264b25 | ||
|
|
93bc94add5 | ||
|
|
79325b8c61 | ||
|
|
58c1eb7004 | ||
|
|
466f292feb | ||
|
|
4de25a0d4a | ||
|
|
43d6bc0d82 | ||
|
|
b53d7c9ecc | ||
|
|
116d50a75a | ||
|
|
e1fb365096 | ||
|
|
03617dacfc | ||
|
|
e6b33f1bc9 | ||
|
|
d9e6a554f6 | ||
|
|
7ef37fe848 | ||
|
|
dd64617cbd | ||
|
|
a227c01a7f | ||
|
|
da53609385 | ||
|
|
e94a8fb2c3 | ||
|
|
d87542ab78 | ||
|
|
945ffb2fb3 | ||
|
|
da808ba25e | ||
|
|
e4b4c7ba39 | ||
|
|
43ead4bb0f | ||
|
|
c4a2f8af39 | ||
|
|
fd281a50b6 | ||
|
|
f8ef4fe6c9 | ||
|
|
faef0ae246 | ||
|
|
117932e272 | ||
|
|
4297708d3e | ||
|
|
2d00d8cb3e | ||
|
|
0190788658 | ||
|
|
b46d4a7166 | ||
|
|
994d529f59 | ||
|
|
7f347ae186 | ||
|
|
cd4ce186ca | ||
|
|
ca5403f97b | ||
|
|
7bb76f62a5 | ||
|
|
dcdbe44648 | ||
|
|
f91005f0ba | ||
|
|
5baea978ab | ||
|
|
9cc1f96eea | ||
|
|
2a65c4b5ca | ||
|
|
e1ef6615cc | ||
|
|
d607c9c821 | ||
|
|
f6b2186824 | ||
|
|
a3fc53059c | ||
|
|
6afe123947 | ||
|
|
7e9c6bb338 | ||
|
|
89a2768fc9 | ||
|
|
9ff3bdf302 | ||
|
|
2c48e89435 | ||
|
|
6ccb68aeb3 | ||
|
|
032243de0a | ||
|
|
5b1b9ec222 | ||
|
|
375c6b23a5 | ||
|
|
b378dff0dc | ||
|
|
23f2d287d6 | ||
|
|
3cd73e54a1 | ||
|
|
bf5d875079 | ||
|
|
aef884523b | ||
|
|
2a59f38faf | ||
|
|
3a0e60c772 | ||
|
|
fb657d41db | ||
|
|
ac7f389563 | ||
|
|
237e82eceb | ||
|
|
8fab4304a4 | ||
|
|
971c9e9147 | ||
|
|
cacbca5a07 | ||
|
|
e4dbd8adfb | ||
|
|
19f77c89e7 | ||
|
|
ce1b922f71 | ||
|
|
81e967864d | ||
|
|
29f55104bc | ||
|
|
ff2e2deb20 | ||
|
|
3fa55cb41b | ||
|
|
aa5d97a0d5 | ||
|
|
89c4f15ae8 | ||
|
|
1351bfc1cf | ||
|
|
32cc76f043 | ||
|
|
968cb2091d | ||
|
|
8986c1037b |
21
.env.sample
21
.env.sample
@@ -18,11 +18,30 @@ SLEEP_DURATION = "3600"
|
||||
## Log file where all output will be written to
|
||||
LOGFILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Generate guids
|
||||
## Generating guids is a slow process, so this is a way to speed up the process
|
||||
## by using the location only, useful when using same files on multiple servers
|
||||
GENERATE_GUIDS = "True"
|
||||
|
||||
## Generate locations
|
||||
## Generating locations is a slow process, so this is a way to speed up the process
|
||||
## by using the guid only, useful when using different files on multiple servers
|
||||
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 32
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma separated for multiple options
|
||||
#USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Map libraries between servers in the event that they are different, order does not matter
|
||||
## Comma separated for multiple options
|
||||
#LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
|
||||
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -24,7 +24,9 @@ A clear and concise description of what you expected to happen.
|
||||
If applicable, add logs to help explain your problem ideally with DEBUG set to true, be sure to remove sensitive information
|
||||
|
||||
**Type:**
|
||||
- [ ] Docker Compose
|
||||
- [ ] Docker
|
||||
- [ ] Unraid
|
||||
- [ ] Native
|
||||
|
||||
**Additional context**
|
||||
|
||||
95
.github/workflows/ci.yml
vendored
95
.github/workflows/ci.yml
vendored
@@ -1,5 +1,6 @@
|
||||
name: CI
|
||||
on:
|
||||
workflow_dispatch:
|
||||
push:
|
||||
paths-ignore:
|
||||
- .gitignore
|
||||
@@ -13,7 +14,7 @@ jobs:
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: "Install dependencies"
|
||||
run: pip install -r requirements.txt && pip install -r test/requirements.txt
|
||||
@@ -21,10 +22,71 @@ jobs:
|
||||
- name: "Run tests"
|
||||
run: pytest -vvv
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: "Install dependencies"
|
||||
run: |
|
||||
pip install -r requirements.txt
|
||||
sudo apt update && sudo apt install -y docker-compose
|
||||
|
||||
- name: "Checkout JellyPlex-Watched-CI"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
repository: luigi311/JellyPlex-Watched-CI
|
||||
path: JellyPlex-Watched-CI
|
||||
|
||||
- name: "Start containers"
|
||||
run: |
|
||||
export PGID=$(id -g)
|
||||
export PUID=$(id -u)
|
||||
|
||||
sudo chown -R $PUID:$PGID JellyPlex-Watched-CI
|
||||
|
||||
docker pull lscr.io/linuxserver/plex &
|
||||
docker pull lscr.io/linuxserver/jellyfin &
|
||||
|
||||
wait
|
||||
|
||||
docker-compose -f JellyPlex-Watched-CI/plex/docker-compose.yml up -d
|
||||
docker-compose -f JellyPlex-Watched-CI/jellyfin/docker-compose.yml up -d
|
||||
# Wait for containers to start
|
||||
sleep 10
|
||||
|
||||
docker-compose -f JellyPlex-Watched-CI/plex/docker-compose.yml logs
|
||||
docker-compose -f JellyPlex-Watched-CI/jellyfin/docker-compose.yml logs
|
||||
|
||||
- name: "Run tests"
|
||||
run: |
|
||||
# Test ci1
|
||||
mv test/ci1.env .env
|
||||
python main.py
|
||||
|
||||
# Test ci2
|
||||
mv test/ci2.env .env
|
||||
python main.py
|
||||
|
||||
# Test ci3
|
||||
mv test/ci3.env .env
|
||||
python main.py
|
||||
|
||||
# Test again to test if it can handle existing data
|
||||
python main.py
|
||||
|
||||
cat mark.log
|
||||
python test/validate_ci_marklog.py
|
||||
|
||||
docker:
|
||||
runs-on: ubuntu-latest
|
||||
needs: pytest
|
||||
needs:
|
||||
- pytest
|
||||
- test
|
||||
env:
|
||||
DEFAULT_VARIANT: alpine
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- dockerfile: Dockerfile.alpine
|
||||
@@ -33,44 +95,49 @@ jobs:
|
||||
variant: slim
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Docker meta
|
||||
id: docker_meta
|
||||
uses: docker/metadata-action@v4
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ secrets.DOCKER_USERNAME }}/jellyplex-watched,enable=${{ secrets.DOCKER_USERNAME != '' }}
|
||||
# Do not push to ghcr.io on PRs due to permission issues
|
||||
ghcr.io/${{ github.repository }},enable=${{ github.event_name != 'pull_request' }}
|
||||
# Do not push to ghcr.io on PRs due to permission issues, only push if the owner is luigi311 so it doesnt fail on forks
|
||||
ghcr.io/${{ github.repository }},enable=${{ github.event_name != 'pull_request' && github.repository_owner == 'luigi311'}}
|
||||
tags: |
|
||||
type=raw,value=latest,enable=${{ matrix.variant == 'alpine' && github.ref_name == github.event.repository.default_branch }}
|
||||
type=raw,value=dev,enable=${{ matrix.variant == 'alpine' && github.ref_name == 'dev' }}
|
||||
type=raw,value=latest,enable=${{ matrix.variant == env.DEFAULT_VARIANT && github.ref_name == github.event.repository.default_branch }}
|
||||
type=raw,value=dev,enable=${{ matrix.variant == env.DEFAULT_VARIANT && github.ref_name == 'dev' }}
|
||||
type=raw,value=latest,suffix=-${{ matrix.variant }},enable={{ is_default_branch }}
|
||||
type=ref,event=branch,suffix=-${{ matrix.variant }}
|
||||
type=ref,event=branch,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
type=ref,event=pr,suffix=-${{ matrix.variant }}
|
||||
type=ref,event=pr,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
type=semver,pattern={{ version }},suffix=-${{ matrix.variant }}
|
||||
type=semver,pattern={{ version }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
type=semver,pattern={{ major }}.{{ minor }},suffix=-${{ matrix.variant }}
|
||||
type=semver,pattern={{ major }}.{{ minor }},enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
type=sha,suffix=-${{ matrix.variant }}
|
||||
type=sha,enable=${{ matrix.variant == env.DEFAULT_VARIANT }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
if: "${{ env.DOCKER_USERNAME != '' }}"
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: "${{ steps.docker_meta.outcome == 'success' }}"
|
||||
uses: docker/login-action@v2
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.actor }}
|
||||
@@ -79,7 +146,7 @@ jobs:
|
||||
- name: Build
|
||||
id: build
|
||||
if: "${{ steps.docker_meta.outputs.tags == '' }}"
|
||||
uses: docker/build-push-action@v3
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ${{ matrix.dockerfile }}
|
||||
@@ -90,7 +157,7 @@ jobs:
|
||||
- name: Build Push
|
||||
id: build_push
|
||||
if: "${{ steps.docker_meta.outputs.tags != '' }}"
|
||||
uses: docker/build-push-action@v3
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ${{ matrix.dockerfile }}
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,4 +1,4 @@
|
||||
.env
|
||||
**.env*
|
||||
*.prof
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
|
||||
11
.vscode/launch.json
vendored
11
.vscode/launch.json
vendored
@@ -11,6 +11,17 @@
|
||||
"program": "main.py",
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true
|
||||
},
|
||||
{
|
||||
"name": "Pytest",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
"module": "pytest",
|
||||
"args": [
|
||||
"-vv"
|
||||
],
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
FROM python:3-alpine
|
||||
FROM python:3.11-alpine
|
||||
|
||||
ENV DRYRUN 'True'
|
||||
ENV DEBUG 'True'
|
||||
ENV DEBUG_LEVEL 'INFO'
|
||||
ENV RUN_ONLY_ONCE 'False'
|
||||
ENV SLEEP_DURATION '3600'
|
||||
ENV LOGFILE 'log.log'
|
||||
ENV MARKFILE 'mark.log'
|
||||
|
||||
ENV USER_MAPPING ''
|
||||
ENV LIBRARY_MAPPING ''
|
||||
@@ -30,12 +32,22 @@ ENV WHITELIST_LIBRARY_TYPE ''
|
||||
ENV BLACKLIST_USERS ''
|
||||
ENV WHITELIST_USERS ''
|
||||
|
||||
|
||||
RUN apk add --no-cache tini && \
|
||||
addgroup --system jellyplex_user && \
|
||||
adduser --system --no-create-home jellyplex_user --ingroup jellyplex_user && \
|
||||
mkdir -p /app && \
|
||||
chown -R jellyplex_user:jellyplex_user /app
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./requirements.txt ./
|
||||
COPY --chown=jellyplex_user:jellyplex_user ./requirements.txt ./
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
COPY --chown=jellyplex_user:jellyplex_user . .
|
||||
|
||||
USER jellyplex_user
|
||||
|
||||
ENTRYPOINT ["/sbin/tini", "--"]
|
||||
CMD ["python", "-u", "main.py"]
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
FROM python:3-slim
|
||||
FROM python:3.11-slim
|
||||
|
||||
ENV DRYRUN 'True'
|
||||
ENV DEBUG 'True'
|
||||
ENV DEBUG_LEVEL 'INFO'
|
||||
ENV RUN_ONLY_ONCE 'False'
|
||||
ENV SLEEP_DURATION '3600'
|
||||
ENV LOGFILE 'log.log'
|
||||
ENV MARKFILE 'mark.log'
|
||||
|
||||
ENV USER_MAPPING ''
|
||||
ENV LIBRARY_MAPPING ''
|
||||
@@ -30,11 +32,25 @@ ENV WHITELIST_LIBRARY_TYPE ''
|
||||
ENV BLACKLIST_USERS ''
|
||||
ENV WHITELIST_USERS ''
|
||||
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install tini --yes --no-install-recommends && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
addgroup --system jellyplex_user && \
|
||||
adduser --system --no-create-home jellyplex_user --ingroup jellyplex_user && \
|
||||
mkdir -p /app && \
|
||||
chown -R jellyplex_user:jellyplex_user /app
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY ./requirements.txt ./
|
||||
COPY --chown=jellyplex_user:jellyplex_user ./requirements.txt ./
|
||||
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
|
||||
COPY . .
|
||||
COPY --chown=jellyplex_user:jellyplex_user . .
|
||||
|
||||
USER jellyplex_user
|
||||
|
||||
ENTRYPOINT ["/bin/tini", "--"]
|
||||
CMD ["python", "-u", "main.py"]
|
||||
|
||||
120
README.md
120
README.md
@@ -12,116 +12,37 @@ Keep in sync all your users watched history between jellyfin and plex servers lo
|
||||
|
||||
### Plex
|
||||
|
||||
* \[x] Match via Filenames
|
||||
* \[x] Match via filenames
|
||||
* \[x] Match via provider ids
|
||||
* \[x] Map usersnames
|
||||
* \[x] Map usernames
|
||||
* \[x] Use single login
|
||||
* \[x] One Way/Multi Way sync
|
||||
* \[x] Sync Watched
|
||||
* \[x] Sync Inprogress
|
||||
* \[x] One way/multi way sync
|
||||
* \[x] Sync watched
|
||||
* \[x] Sync in progress
|
||||
|
||||
### Jellyfin
|
||||
|
||||
* \[x] Match via Filenames
|
||||
* \[x] Match via filenames
|
||||
* \[x] Match via provider ids
|
||||
* \[x] Map usersnames
|
||||
* \[x] Map usernames
|
||||
* \[x] Use single login
|
||||
* \[x] One Way/Multi Way sync
|
||||
* \[x] Sync Watched
|
||||
* \[ ] Sync Inprogress
|
||||
* \[x] One way/multi way sync
|
||||
* \[x] Sync watched
|
||||
* \[ ] Sync in progress
|
||||
|
||||
### Emby
|
||||
|
||||
* \[ ] Match via Filenames
|
||||
* \[ ] Match via filenames
|
||||
* \[ ] Match via provider ids
|
||||
* \[ ] Map usersnames
|
||||
* \[ ] Map usernames
|
||||
* \[ ] Use single login
|
||||
* \[ ] One Way/Multi Way sync
|
||||
* \[ ] Sync Watched
|
||||
* \[ ] Sync Inprogress
|
||||
* \[ ] One way/multi way sync
|
||||
* \[ ] Sync watched
|
||||
* \[ ] Sync in progress
|
||||
|
||||
## Configuration
|
||||
|
||||
```bash
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played and instead just output to log if they would of been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "False"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "info"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "False"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = "3600"
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOGFILE = "log.log"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
|
||||
## Comma separated for multiple options
|
||||
USER_MAPPING = { "testuser2": "testuser3", "testuser1":"testuser4" }
|
||||
|
||||
## Map libraries between servers in the even that they are different, order does not matter
|
||||
## Comma separated for multiple options
|
||||
LIBRARY_MAPPING = { "Shows": "TV Shows", "Movie": "Movies" }
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply so if the mapping for the user or library exist then both will be excluded.
|
||||
## Comma separated for multiple options
|
||||
BLACKLIST_LIBRARY = ""
|
||||
WHITELIST_LIBRARY = ""
|
||||
BLACKLIST_LIBRARY_TYPE = ""
|
||||
WHITELIST_LIBRARY_TYPE = ""
|
||||
BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "testuser1,testuser2"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use token as it is faster to connect as it is direct to the server instead of going through the plex servers
|
||||
## URL of the plex server, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
PLEX_BASEURL = "http://localhost:32400, https://nas:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
|
||||
## Comma separated list for multiple servers
|
||||
PLEX_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
||||
|
||||
## If not using plex token then use username and password of the server admin along with the servername
|
||||
## Comma separated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "False"
|
||||
|
||||
|
||||
## control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will cause the updates from plex
|
||||
## to be updated in jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
|
||||
## Comma separated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096, http://nas:8096"
|
||||
|
||||
## Jellyfin api token, created manually by logging in to the jellyfin server admin dashboard and creating an api key
|
||||
## Comma separated list for multiple servers
|
||||
JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
||||
```
|
||||
Full list of configuration options can be found in the [.env.sample](.env.sample)
|
||||
|
||||
## Installation
|
||||
|
||||
@@ -175,6 +96,15 @@ JELLYFIN_TOKEN = "SuperSecretToken, SuperSecretToken2"
|
||||
docker run --rm -it -v "$(pwd)/.env:/app/.env" luigi311/jellyplex-watched:latest
|
||||
```
|
||||
|
||||
## Troubleshooting/Issues
|
||||
|
||||
* Jellyfin
|
||||
* Attempt to decode JSON with unexpected mimetype, make sure you enable remote access or add your docker subnet to lan networks in jellyfin settings
|
||||
|
||||
* Configuration
|
||||
* Do not use quotes around variables in docker compose
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
I am open to receiving pull requests. If you are submitting a pull request, please make sure run it locally for a day or two to make sure it is working as expected and stable. Make all pull requests against the dev branch and nothing will be merged into the main without going through the lower branches.
|
||||
|
||||
@@ -12,7 +12,8 @@ services:
|
||||
- RUN_ONLY_ONCE=False
|
||||
- SLEEP_DURATION=3600
|
||||
- LOGFILE=/tmp/log.log
|
||||
- USER_MAPPING=
|
||||
- MARKFILE=/tmp/mark.log
|
||||
- USER_MAPPING={"user1":"user2"}
|
||||
- LIBRARY_MAPPING={"TV Shows":"Shows"}
|
||||
- BLACKLIST_LIBRARY=
|
||||
- WHITELIST_LIBRARY=
|
||||
@@ -20,10 +21,10 @@ services:
|
||||
- WHITELIST_LIBRARY_TYPE=
|
||||
- BLACKLIST_USERS=
|
||||
- WHITELIST_USERS=
|
||||
- PLEX_BASEURL=
|
||||
- PLEX_TOKEN=
|
||||
- JELLYFIN_BASEURL=
|
||||
- JELLYFIN_TOKEN=
|
||||
- PLEX_BASEURL=https://localhost:32400
|
||||
- PLEX_TOKEN=plex_token
|
||||
- JELLYFIN_BASEURL=http://localhost:8096
|
||||
- JELLYFIN_TOKEN=jelly_token
|
||||
- SSL_BYPASS=True
|
||||
- SYNC_FROM_PLEX_TO_JELLYFIN=True
|
||||
- SYNC_FROM_JELLYFIN_TO_PLEX=True
|
||||
|
||||
6
main.py
6
main.py
@@ -1,9 +1,9 @@
|
||||
import sys
|
||||
|
||||
if __name__ == "__main__":
|
||||
# Check python version 3.6 or higher
|
||||
if not (3, 6) <= tuple(map(int, sys.version_info[:2])):
|
||||
print("This script requires Python 3.6 or higher")
|
||||
# Check python version 3.9 or higher
|
||||
if not (3, 9) <= tuple(map(int, sys.version_info[:2])):
|
||||
print("This script requires Python 3.9 or higher")
|
||||
sys.exit(1)
|
||||
|
||||
from src.main import main
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
PlexAPI==4.13.4
|
||||
requests==2.28.2
|
||||
PlexAPI==4.15.7
|
||||
requests==2.31.0
|
||||
python-dotenv==1.0.0
|
||||
aiohttp==3.8.4
|
||||
|
||||
@@ -15,7 +15,7 @@ def setup_black_white_lists(
|
||||
blacklist_library,
|
||||
blacklist_library_type,
|
||||
blacklist_users,
|
||||
"White",
|
||||
"Black",
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
@@ -24,7 +24,7 @@ def setup_black_white_lists(
|
||||
whitelist_library,
|
||||
whitelist_library_type,
|
||||
whitelist_users,
|
||||
"Black",
|
||||
"White",
|
||||
library_mapping,
|
||||
user_mapping,
|
||||
)
|
||||
@@ -38,6 +38,7 @@ def setup_black_white_lists(
|
||||
whitelist_users,
|
||||
)
|
||||
|
||||
|
||||
def setup_x_lists(
|
||||
xlist_library,
|
||||
xlist_library_type,
|
||||
@@ -89,42 +90,3 @@ def setup_x_lists(
|
||||
logger(f"{xlist_type}list Users: {xlist_users}", 1)
|
||||
|
||||
return xlist_library, xlist_library_type, xlist_users
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from dotenv import load_dotenv
|
||||
load_dotenv(override=True)
|
||||
|
||||
logfile = os.getenv("LOGFILE", "log.log")
|
||||
markfile = os.getenv("MARKFILE", "mark.log")
|
||||
|
||||
|
||||
def logger(message: str, log_type=0):
|
||||
@@ -22,6 +23,10 @@ def logger(message: str, log_type=0):
|
||||
output = f"[DEBUG]: {output}"
|
||||
elif log_type == 4:
|
||||
output = f"[WARNING]: {output}"
|
||||
elif log_type == 5:
|
||||
output = f"[MARK]: {output}"
|
||||
elif log_type == 6:
|
||||
output = f"[DRYRUN]: {output}"
|
||||
else:
|
||||
output = None
|
||||
|
||||
@@ -31,6 +36,24 @@ def logger(message: str, log_type=0):
|
||||
file.write(output + "\n")
|
||||
|
||||
|
||||
def log_marked(
|
||||
username: str, library: str, movie_show: str, episode: str = None, duration=None
|
||||
):
|
||||
if markfile is None:
|
||||
return
|
||||
|
||||
output = f"{username}/{library}/{movie_show}"
|
||||
|
||||
if episode:
|
||||
output += f"/{episode}"
|
||||
|
||||
if duration:
|
||||
output += f"/{duration}"
|
||||
|
||||
file = open(f"{markfile}", "a", encoding="utf-8")
|
||||
file.write(output + "\n")
|
||||
|
||||
|
||||
# Reimplementation of distutils.util.strtobool due to it being deprecated
|
||||
# Source: https://github.com/PostHog/posthog/blob/01e184c29d2c10c43166f1d40a334abbc3f99d8a/posthog/utils.py#L668
|
||||
def str_to_bool(value: any) -> bool:
|
||||
@@ -41,9 +64,16 @@ def str_to_bool(value: any) -> bool:
|
||||
|
||||
# Search for nested element in list
|
||||
def contains_nested(element, lst):
|
||||
if lst is None:
|
||||
return None
|
||||
|
||||
for i, item in enumerate(lst):
|
||||
if item is None:
|
||||
continue
|
||||
if element in item:
|
||||
return i
|
||||
elif element == item:
|
||||
return i
|
||||
return None
|
||||
|
||||
|
||||
@@ -63,12 +93,25 @@ def search_mapping(dictionary: dict, key_value: str):
|
||||
return None
|
||||
|
||||
|
||||
def future_thread_executor(args: list, workers: int = -1):
|
||||
def future_thread_executor(
|
||||
args: list, threads: int = None, override_threads: bool = False
|
||||
):
|
||||
futures_list = []
|
||||
results = []
|
||||
|
||||
if workers == -1:
|
||||
workers = min(32, os.cpu_count() * 2)
|
||||
workers = min(int(os.getenv("MAX_THREADS", 32)), os.cpu_count() * 2)
|
||||
if threads:
|
||||
workers = min(threads, workers)
|
||||
|
||||
if override_threads:
|
||||
workers = threads
|
||||
|
||||
# If only one worker, run in main thread to avoid overhead
|
||||
if workers == 1:
|
||||
results = []
|
||||
for arg in args:
|
||||
results.append(arg[0](*arg[1:]))
|
||||
return results
|
||||
|
||||
with ThreadPoolExecutor(max_workers=workers) as executor:
|
||||
for arg in args:
|
||||
|
||||
600
src/jellyfin.py
600
src/jellyfin.py
@@ -1,7 +1,15 @@
|
||||
import asyncio, aiohttp, traceback
|
||||
import traceback, os
|
||||
from math import floor
|
||||
from dotenv import load_dotenv
|
||||
import requests
|
||||
|
||||
from src.functions import logger, search_mapping, contains_nested
|
||||
from src.functions import (
|
||||
logger,
|
||||
search_mapping,
|
||||
contains_nested,
|
||||
log_marked,
|
||||
str_to_bool,
|
||||
)
|
||||
from src.library import (
|
||||
check_skip_logic,
|
||||
generate_library_guids_dict,
|
||||
@@ -10,61 +18,98 @@ from src.watched import (
|
||||
combine_watched_dicts,
|
||||
)
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
def get_movie_guids(movie):
|
||||
if "ProviderIds" in movie:
|
||||
logger(
|
||||
f"Jellyfin: {movie['Name']} {movie['ProviderIds']} {movie['MediaSources']}",
|
||||
3,
|
||||
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
||||
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
|
||||
|
||||
|
||||
def get_guids(item):
|
||||
if item.get("Name"):
|
||||
guids = {"title": item.get("Name")}
|
||||
else:
|
||||
logger(f"Jellyfin: Name not found in {item.get('Id')}", 1)
|
||||
guids = {"title": None}
|
||||
|
||||
if "ProviderIds" in item:
|
||||
guids.update({k.lower(): v for k, v in item["ProviderIds"].items()})
|
||||
else:
|
||||
logger(f"Jellyfin: ProviderIds not found in {item.get('Name')}", 1)
|
||||
|
||||
if "MediaSources" in item:
|
||||
guids["locations"] = tuple(
|
||||
[x["Path"].split("/")[-1] for x in item["MediaSources"] if "Path" in x]
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: {movie['Name']} {movie['MediaSources']['Path']}",
|
||||
3,
|
||||
)
|
||||
logger(f"Jellyfin: MediaSources not found in {item.get('Name')}", 1)
|
||||
guids["locations"] = tuple()
|
||||
|
||||
# Create a dictionary for the movie with its title
|
||||
movie_guids = {"title": movie["Name"]}
|
||||
|
||||
# If the movie has provider IDs, add them to the dictionary
|
||||
if "ProviderIds" in movie:
|
||||
movie_guids.update({k.lower(): v for k, v in movie["ProviderIds"].items()})
|
||||
|
||||
# If the movie has media sources, add them to the dictionary
|
||||
if "MediaSources" in movie:
|
||||
movie_guids["locations"] = tuple(
|
||||
[x["Path"].split("/")[-1] for x in movie["MediaSources"]]
|
||||
)
|
||||
|
||||
movie_guids["status"] = {
|
||||
"completed": movie["UserData"]["Played"],
|
||||
if "UserData" in item:
|
||||
guids["status"] = {
|
||||
"completed": item["UserData"]["Played"],
|
||||
# Convert ticks to milliseconds to match Plex
|
||||
"time": floor(movie["UserData"]["PlaybackPositionTicks"] / 10000),
|
||||
"time": floor(item["UserData"]["PlaybackPositionTicks"] / 10000),
|
||||
}
|
||||
else:
|
||||
logger(f"Jellyfin: UserData not found in {item.get('Name')}", 1)
|
||||
guids["status"] = {}
|
||||
|
||||
return movie_guids
|
||||
return guids
|
||||
|
||||
|
||||
def get_episode_guids(episode):
|
||||
# Create a dictionary for the episode with its provider IDs and media sources
|
||||
episode_dict = {k.lower(): v for k, v in episode["ProviderIds"].items()}
|
||||
episode_dict["title"] = episode["Name"]
|
||||
episode_dict["locations"] = tuple(
|
||||
[x["Path"].split("/")[-1] for x in episode["MediaSources"]]
|
||||
def get_video_status(jellyfin_video, videos_ids, videos):
|
||||
video_status = None
|
||||
|
||||
if generate_locations:
|
||||
if "MediaSources" in jellyfin_video:
|
||||
for video_location in jellyfin_video["MediaSources"]:
|
||||
if "Path" in video_location:
|
||||
if (
|
||||
contains_nested(
|
||||
video_location["Path"].split("/")[-1],
|
||||
videos_ids["locations"],
|
||||
)
|
||||
is not None
|
||||
):
|
||||
for video in videos:
|
||||
if (
|
||||
contains_nested(
|
||||
video_location["Path"].split("/")[-1],
|
||||
video["locations"],
|
||||
)
|
||||
is not None
|
||||
):
|
||||
video_status = video["status"]
|
||||
break
|
||||
break
|
||||
|
||||
episode_dict["status"] = {
|
||||
"completed": episode["UserData"]["Played"],
|
||||
"time": floor(episode["UserData"]["PlaybackPositionTicks"] / 10000),
|
||||
}
|
||||
if generate_guids:
|
||||
if not video_status:
|
||||
for (
|
||||
video_provider_source,
|
||||
video_provider_id,
|
||||
) in jellyfin_video["ProviderIds"].items():
|
||||
if video_provider_source.lower() in videos_ids:
|
||||
if (
|
||||
video_provider_id.lower()
|
||||
in videos_ids[video_provider_source.lower()]
|
||||
):
|
||||
for video in videos:
|
||||
if video_provider_id.lower() in video.get(
|
||||
video_provider_source.lower(), []
|
||||
):
|
||||
video_status = video["status"]
|
||||
break
|
||||
break
|
||||
|
||||
return episode_dict
|
||||
return video_status
|
||||
|
||||
|
||||
class Jellyfin:
|
||||
def __init__(self, baseurl, token):
|
||||
self.baseurl = baseurl
|
||||
self.token = token
|
||||
self.timeout = int(os.getenv("REQUEST_TIMEOUT", 300))
|
||||
|
||||
if not self.baseurl:
|
||||
raise Exception("Jellyfin baseurl not set")
|
||||
@@ -72,12 +117,13 @@ class Jellyfin:
|
||||
if not self.token:
|
||||
raise Exception("Jellyfin token not set")
|
||||
|
||||
self.users = asyncio.run(self.get_users())
|
||||
self.session = requests.Session()
|
||||
self.users = self.get_users()
|
||||
|
||||
async def query(self, query, query_type, session, identifiers=None):
|
||||
def query(self, query, query_type, session=None, identifiers=None):
|
||||
try:
|
||||
results = None
|
||||
headers = {"Accept": "application/json", "X-Emby-Token": self.token}
|
||||
|
||||
authorization = (
|
||||
"MediaBrowser , "
|
||||
'Client="other", '
|
||||
@@ -85,27 +131,31 @@ class Jellyfin:
|
||||
'DeviceId="script", '
|
||||
'Version="0.0.0"'
|
||||
)
|
||||
headers["X-Emby-Authorization"] = authorization
|
||||
headers = {
|
||||
"Accept": "application/json",
|
||||
"X-Emby-Token": self.token,
|
||||
"X-Emby-Authorization": authorization,
|
||||
}
|
||||
|
||||
if query_type == "get":
|
||||
async with session.get(
|
||||
self.baseurl + query, headers=headers
|
||||
) as response:
|
||||
if response.status != 200:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status} {response.reason}"
|
||||
response = self.session.get(
|
||||
self.baseurl + query, headers=headers, timeout=self.timeout
|
||||
)
|
||||
results = await response.json()
|
||||
if response.status_code != 200:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status_code} {response.reason}"
|
||||
)
|
||||
results = response.json()
|
||||
|
||||
elif query_type == "post":
|
||||
async with session.post(
|
||||
self.baseurl + query, headers=headers
|
||||
) as response:
|
||||
if response.status != 200:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status} {response.reason}"
|
||||
response = self.session.post(
|
||||
self.baseurl + query, headers=headers, timeout=self.timeout
|
||||
)
|
||||
results = await response.json()
|
||||
if response.status_code != 200:
|
||||
raise Exception(
|
||||
f"Query failed with status {response.status_code} {response.reason}"
|
||||
)
|
||||
results = response.json()
|
||||
|
||||
if not isinstance(results, list) and not isinstance(results, dict):
|
||||
raise Exception("Query result is not of type list or dict")
|
||||
@@ -120,13 +170,27 @@ class Jellyfin:
|
||||
logger(f"Jellyfin: Query {query_type} {query}\nResults {results}\n{e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_users(self):
|
||||
def info(self) -> str:
|
||||
try:
|
||||
query_string = "/System/Info/Public"
|
||||
|
||||
response = self.query(query_string, "get")
|
||||
|
||||
if response:
|
||||
return f"{response['ServerName']}: {response['Version']}"
|
||||
else:
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Get server name failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def get_users(self):
|
||||
try:
|
||||
users = {}
|
||||
|
||||
query_string = "/Users"
|
||||
async with aiohttp.ClientSession() as session:
|
||||
response = await self.query(query_string, "get", session)
|
||||
response = self.query(query_string, "get")
|
||||
|
||||
# If response is not empty
|
||||
if response:
|
||||
@@ -138,7 +202,7 @@ class Jellyfin:
|
||||
logger(f"Jellyfin: Get users failed {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_user_library_watched(
|
||||
def get_user_library_watched(
|
||||
self, user_name, user_id, library_type, library_id, library_title
|
||||
):
|
||||
try:
|
||||
@@ -151,54 +215,38 @@ class Jellyfin:
|
||||
0,
|
||||
)
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
# Movies
|
||||
if library_type == "Movie":
|
||||
user_watched[user_name][library_title] = []
|
||||
watched = await self.query(
|
||||
watched = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
|
||||
in_progress = await self.query(
|
||||
in_progress = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsResumable&IncludeItemTypes=Movie&Recursive=True&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
|
||||
for movie in watched["Items"]:
|
||||
for movie in watched["Items"] + in_progress["Items"]:
|
||||
if "MediaSources" in movie and movie["MediaSources"] != {}:
|
||||
logger(
|
||||
f"Jellyfin: Adding {movie['Name']} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
# Get the movie's GUIDs
|
||||
movie_guids = get_movie_guids(movie)
|
||||
|
||||
# Append the movie dictionary to the list for the given user and library
|
||||
user_watched[user_name][library_title].append(movie_guids)
|
||||
logger(
|
||||
f"Jellyfin: Added {movie_guids} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
# Get all partially watched movies greater than 1 minute
|
||||
for movie in in_progress["Items"]:
|
||||
if "MediaSources" in movie and movie["MediaSources"] != {}:
|
||||
if movie["UserData"]["PlaybackPositionTicks"] < 600000000:
|
||||
if "UserData" not in movie:
|
||||
continue
|
||||
|
||||
# Skip if not watched or watched less than a minute
|
||||
if (
|
||||
movie["UserData"]["Played"] == True
|
||||
or movie["UserData"]["PlaybackPositionTicks"] > 600000000
|
||||
):
|
||||
logger(
|
||||
f"Jellyfin: Adding {movie['Name']} to {user_name} watched list",
|
||||
f"Jellyfin: Adding {movie.get('Name')} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
# Get the movie's GUIDs
|
||||
movie_guids = get_movie_guids(movie)
|
||||
movie_guids = get_guids(movie)
|
||||
|
||||
# Append the movie dictionary to the list for the given user and library
|
||||
user_watched[user_name][library_title].append(movie_guids)
|
||||
@@ -213,51 +261,50 @@ class Jellyfin:
|
||||
user_watched[user_name][library_title] = {}
|
||||
|
||||
# Retrieve a list of watched TV shows
|
||||
watched_shows = await self.query(
|
||||
watched_shows = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&isPlaceHolder=false&IncludeItemTypes=Series&Recursive=True&Fields=ProviderIds,Path,RecursiveItemCount",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
|
||||
# Filter the list of shows to only include those that have been partially or fully watched
|
||||
watched_shows_filtered = []
|
||||
for show in watched_shows["Items"]:
|
||||
if not "UserData" in show:
|
||||
continue
|
||||
|
||||
if "PlayedPercentage" in show["UserData"]:
|
||||
if show["UserData"]["PlayedPercentage"] > 0:
|
||||
watched_shows_filtered.append(show)
|
||||
|
||||
# Create a list of tasks to retrieve the seasons of each watched show
|
||||
seasons_tasks = []
|
||||
# Retrieve the seasons of each watched show
|
||||
seasons_watched = []
|
||||
for show in watched_shows_filtered:
|
||||
logger(
|
||||
f"Jellyfin: Adding {show['Name']} to {user_name} watched list",
|
||||
f"Jellyfin: Adding {show.get('Name')} to {user_name} watched list",
|
||||
3,
|
||||
)
|
||||
show_guids = {
|
||||
k.lower(): v for k, v in show["ProviderIds"].items()
|
||||
}
|
||||
show_guids = {k.lower(): v for k, v in show["ProviderIds"].items()}
|
||||
show_guids["title"] = show["Name"]
|
||||
show_guids["locations"] = tuple([show["Path"].split("/")[-1]])
|
||||
show_guids["locations"] = (
|
||||
tuple([show["Path"].split("/")[-1]])
|
||||
if "Path" in show
|
||||
else tuple()
|
||||
)
|
||||
show_guids = frozenset(show_guids.items())
|
||||
show_identifiers = {
|
||||
"show_guids": show_guids,
|
||||
"show_id": show["Id"],
|
||||
}
|
||||
|
||||
season_task = asyncio.ensure_future(
|
||||
seasons_watched.append(
|
||||
self.query(
|
||||
f"/Shows/{show['Id']}/Seasons"
|
||||
+ f"?userId={user_id}&isPlaceHolder=false&Fields=ProviderIds,RecursiveItemCount",
|
||||
"get",
|
||||
session,
|
||||
frozenset(show_identifiers.items()),
|
||||
identifiers=frozenset(show_identifiers.items()),
|
||||
)
|
||||
)
|
||||
seasons_tasks.append(season_task)
|
||||
|
||||
# Retrieve the seasons for each watched show
|
||||
seasons_watched = await asyncio.gather(*seasons_tasks)
|
||||
|
||||
# Filter the list of seasons to only include those that have been partially or fully watched
|
||||
seasons_watched_filtered = []
|
||||
@@ -270,46 +317,40 @@ class Jellyfin:
|
||||
for season in seasons["Items"]:
|
||||
if "PlayedPercentage" in season["UserData"]:
|
||||
if season["UserData"]["PlayedPercentage"] > 0:
|
||||
seasons_watched_filtered_dict["Items"].append(
|
||||
season
|
||||
)
|
||||
seasons_watched_filtered_dict["Items"].append(season)
|
||||
|
||||
if seasons_watched_filtered_dict["Items"]:
|
||||
seasons_watched_filtered.append(
|
||||
seasons_watched_filtered_dict
|
||||
)
|
||||
seasons_watched_filtered.append(seasons_watched_filtered_dict)
|
||||
|
||||
# Create a list of tasks to retrieve the episodes of each watched season
|
||||
episodes_tasks = []
|
||||
watched_episodes = []
|
||||
for seasons in seasons_watched_filtered:
|
||||
if len(seasons["Items"]) > 0:
|
||||
for season in seasons["Items"]:
|
||||
if "IndexNumber" not in season:
|
||||
logger(
|
||||
f"Jellyfin: Skipping show {season.get('SeriesName')} season {season.get('Name')} as it has no index number",
|
||||
3,
|
||||
)
|
||||
|
||||
continue
|
||||
season_identifiers = dict(seasons["Identifiers"])
|
||||
season_identifiers["season_id"] = season["Id"]
|
||||
season_identifiers["season_name"] = season["Name"]
|
||||
watched_task = asyncio.ensure_future(
|
||||
self.query(
|
||||
season_identifiers["season_index"] = season["IndexNumber"]
|
||||
watched_task = self.query(
|
||||
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
||||
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&Filters=IsPlayed&Fields=ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
frozenset(season_identifiers.items()),
|
||||
identifiers=frozenset(season_identifiers.items()),
|
||||
)
|
||||
)
|
||||
in_progress_task = asyncio.ensure_future(
|
||||
self.query(
|
||||
|
||||
in_progress_task = self.query(
|
||||
f"/Shows/{season_identifiers['show_id']}/Episodes"
|
||||
+ f"?seasonId={season['Id']}&userId={user_id}&isPlaceHolder=false&Filters=IsResumable&Fields=ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
frozenset(season_identifiers.items()),
|
||||
identifiers=frozenset(season_identifiers.items()),
|
||||
)
|
||||
)
|
||||
episodes_tasks.append(watched_task)
|
||||
episodes_tasks.append(in_progress_task)
|
||||
|
||||
# Retrieve the episodes for each watched season
|
||||
watched_episodes = await asyncio.gather(*episodes_tasks)
|
||||
watched_episodes.append(watched_task)
|
||||
watched_episodes.append(in_progress_task)
|
||||
|
||||
# Iterate through the watched episodes
|
||||
for episodes in watched_episodes:
|
||||
@@ -322,7 +363,7 @@ class Jellyfin:
|
||||
for episode in episodes["Items"]:
|
||||
if (
|
||||
"MediaSources" in episode
|
||||
and episode["MediaSources"] is not {}
|
||||
and episode["MediaSources"] != {}
|
||||
):
|
||||
# If watched or watched more than a minute
|
||||
if (
|
||||
@@ -330,7 +371,7 @@ class Jellyfin:
|
||||
or episode["UserData"]["PlaybackPositionTicks"]
|
||||
> 600000000
|
||||
):
|
||||
episode_dict = get_episode_guids(episode)
|
||||
episode_dict = get_guids(episode)
|
||||
# Add the episode dictionary to the season's list of episodes
|
||||
season_dict["Episodes"].append(episode_dict)
|
||||
|
||||
@@ -344,18 +385,18 @@ class Jellyfin:
|
||||
] = {}
|
||||
|
||||
if (
|
||||
season_dict["Identifiers"]["season_name"]
|
||||
season_dict["Identifiers"]["season_index"]
|
||||
not in user_watched[user_name][library_title][
|
||||
season_dict["Identifiers"]["show_guids"]
|
||||
]
|
||||
):
|
||||
user_watched[user_name][library_title][
|
||||
season_dict["Identifiers"]["show_guids"]
|
||||
][season_dict["Identifiers"]["season_name"]] = []
|
||||
][season_dict["Identifiers"]["season_index"]] = []
|
||||
|
||||
user_watched[user_name][library_title][
|
||||
season_dict["Identifiers"]["show_guids"]
|
||||
][season_dict["Identifiers"]["season_name"]] = season_dict[
|
||||
][season_dict["Identifiers"]["season_index"]] = season_dict[
|
||||
"Episodes"
|
||||
]
|
||||
logger(
|
||||
@@ -379,7 +420,7 @@ class Jellyfin:
|
||||
logger(traceback.format_exc(), 2)
|
||||
return {}
|
||||
|
||||
async def get_users_watched(
|
||||
def get_users_watched(
|
||||
self,
|
||||
user_name,
|
||||
user_id,
|
||||
@@ -392,44 +433,38 @@ class Jellyfin:
|
||||
try:
|
||||
# Get all libraries
|
||||
user_name = user_name.lower()
|
||||
tasks_watched = []
|
||||
watched = []
|
||||
|
||||
tasks_libraries = []
|
||||
async with aiohttp.ClientSession() as session:
|
||||
libraries = await self.query(f"/Users/{user_id}/Views", "get", session)
|
||||
for library in libraries["Items"]:
|
||||
libraries = []
|
||||
|
||||
all_libraries = self.query(f"/Users/{user_id}/Views", "get")
|
||||
for library in all_libraries["Items"]:
|
||||
library_id = library["Id"]
|
||||
library_title = library["Name"]
|
||||
identifiers = {
|
||||
"library_id": library_id,
|
||||
"library_title": library_title,
|
||||
}
|
||||
task = asyncio.ensure_future(
|
||||
libraries.append(
|
||||
self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?ParentId={library_id}&Filters=IsPlayed&Recursive=True&excludeItemTypes=Folder&limit=100",
|
||||
"get",
|
||||
session,
|
||||
identifiers=identifiers,
|
||||
)
|
||||
)
|
||||
tasks_libraries.append(task)
|
||||
|
||||
libraries = await asyncio.gather(
|
||||
*tasks_libraries, return_exceptions=True
|
||||
)
|
||||
|
||||
for watched in libraries:
|
||||
if len(watched["Items"]) == 0:
|
||||
for library in libraries:
|
||||
if len(library["Items"]) == 0:
|
||||
continue
|
||||
|
||||
library_id = watched["Identifiers"]["library_id"]
|
||||
library_title = watched["Identifiers"]["library_title"]
|
||||
library_id = library["Identifiers"]["library_id"]
|
||||
library_title = library["Identifiers"]["library_title"]
|
||||
# Get all library types excluding "Folder"
|
||||
types = set(
|
||||
[
|
||||
x["Type"]
|
||||
for x in watched["Items"]
|
||||
for x in library["Items"]
|
||||
if x["Type"] in ["Movie", "Series", "Episode"]
|
||||
]
|
||||
)
|
||||
@@ -453,7 +488,7 @@ class Jellyfin:
|
||||
|
||||
# If there are multiple types in library raise error
|
||||
if types is None or len(types) < 1:
|
||||
all_types = set([x["Type"] for x in watched["Items"]])
|
||||
all_types = set([x["Type"] for x in library["Items"]])
|
||||
logger(
|
||||
f"Jellyfin: Skipping Library {library_title} found types: {types}, all types: {all_types}",
|
||||
1,
|
||||
@@ -462,7 +497,7 @@ class Jellyfin:
|
||||
|
||||
for library_type in types:
|
||||
# Get watched for user
|
||||
task = asyncio.ensure_future(
|
||||
watched.append(
|
||||
self.get_user_library_watched(
|
||||
user_name,
|
||||
user_id,
|
||||
@@ -471,16 +506,13 @@ class Jellyfin:
|
||||
library_title,
|
||||
)
|
||||
)
|
||||
tasks_watched.append(task)
|
||||
|
||||
watched = await asyncio.gather(*tasks_watched, return_exceptions=True)
|
||||
|
||||
return watched
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Failed to get users watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def get_watched(
|
||||
def get_watched(
|
||||
self,
|
||||
users,
|
||||
blacklist_library,
|
||||
@@ -506,7 +538,6 @@ class Jellyfin:
|
||||
)
|
||||
)
|
||||
|
||||
watched = await asyncio.gather(*watched, return_exceptions=True)
|
||||
for user_watched in watched:
|
||||
user_watched_combine = combine_watched_dicts(user_watched)
|
||||
for user, user_watched_temp in user_watched_combine.items():
|
||||
@@ -519,7 +550,7 @@ class Jellyfin:
|
||||
logger(f"Jellyfin: Failed to get watched, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def update_user_watched(
|
||||
def update_user_watched(
|
||||
self, user_name, user_id, library, library_id, videos, dryrun
|
||||
):
|
||||
try:
|
||||
@@ -532,106 +563,91 @@ class Jellyfin:
|
||||
videos_movies_ids,
|
||||
) = generate_library_guids_dict(videos)
|
||||
|
||||
if (
|
||||
not videos_movies_ids
|
||||
and not videos_shows_ids
|
||||
and not videos_episodes_ids
|
||||
):
|
||||
logger(
|
||||
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||
1,
|
||||
)
|
||||
|
||||
return
|
||||
|
||||
logger(
|
||||
f"Jellyfin: mark list\nShows: {videos_shows_ids}\nEpisodes: {videos_episodes_ids}\nMovies: {videos_movies_ids}",
|
||||
1,
|
||||
)
|
||||
async with aiohttp.ClientSession() as session:
|
||||
|
||||
if videos_movies_ids:
|
||||
jellyfin_search = await self.query(
|
||||
jellyfin_search = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,MediaSources&IncludeItemTypes=Movie",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
for jellyfin_video in jellyfin_search["Items"]:
|
||||
movie_status = None
|
||||
|
||||
if "MediaSources" in jellyfin_video:
|
||||
for movie_location in jellyfin_video["MediaSources"]:
|
||||
if (
|
||||
contains_nested(
|
||||
movie_location["Path"].split("/")[-1],
|
||||
videos_movies_ids["locations"],
|
||||
movie_status = get_video_status(
|
||||
jellyfin_video, videos_movies_ids, videos
|
||||
)
|
||||
is not None
|
||||
):
|
||||
for video in videos:
|
||||
if (
|
||||
contains_nested(
|
||||
movie_location["Path"].split("/")[-1],
|
||||
video["locations"],
|
||||
)
|
||||
is not None
|
||||
):
|
||||
movie_status = video["status"]
|
||||
break
|
||||
break
|
||||
|
||||
if not movie_status:
|
||||
for (
|
||||
movie_provider_source,
|
||||
movie_provider_id,
|
||||
) in jellyfin_video["ProviderIds"].items():
|
||||
if movie_provider_source.lower() in videos_movies_ids:
|
||||
if (
|
||||
movie_provider_id.lower()
|
||||
in videos_movies_ids[
|
||||
movie_provider_source.lower()
|
||||
]
|
||||
):
|
||||
for video in videos:
|
||||
if (
|
||||
movie_provider_id.lower()
|
||||
in video[movie_provider_source.lower()]
|
||||
):
|
||||
movie_status = video["status"]
|
||||
break
|
||||
break
|
||||
|
||||
if movie_status:
|
||||
jellyfin_video_id = jellyfin_video["Id"]
|
||||
if movie_status["completed"]:
|
||||
msg = f"{jellyfin_video['Name']} as watched for {user_name} in {library} for Jellyfin"
|
||||
msg = f"Jellyfin: {jellyfin_video.get('Name')} as watched for {user_name} in {library}"
|
||||
if not dryrun:
|
||||
logger(f"Marking {msg}", 0)
|
||||
await self.query(
|
||||
logger(msg, 5)
|
||||
self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_video_id}",
|
||||
"post",
|
||||
session,
|
||||
)
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_video.get("Name"),
|
||||
)
|
||||
else:
|
||||
# TODO add support for partially watched movies
|
||||
msg = f"{jellyfin_video['Name']} as partially watched for {floor(movie_status['time'] / 60_000)} minutes for {user_name} in {library} for Jellyfin"
|
||||
msg = f"Jellyfin: {jellyfin_video.get('Name')} as partially watched for {floor(movie_status['time'] / 60_000)} minutes for {user_name} in {library}"
|
||||
"""
|
||||
if not dryrun:
|
||||
pass
|
||||
# logger(f"Marked {msg}", 0)
|
||||
# logger(msg, 5)
|
||||
else:
|
||||
pass
|
||||
# logger(f"Dryrun {msg}", 0)
|
||||
# logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_video.get("Name"),
|
||||
duration=floor(movie_status["time"] / 60_000),
|
||||
)"""
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping movie {jellyfin_video['Name']} as it is not in mark list for {user_name}",
|
||||
1,
|
||||
f"Jellyfin: Skipping movie {jellyfin_video.get('Name')} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
|
||||
# TV Shows
|
||||
if videos_shows_ids and videos_episodes_ids:
|
||||
jellyfin_search = await self.query(
|
||||
jellyfin_search = self.query(
|
||||
f"/Users/{user_id}/Items"
|
||||
+ f"?SortBy=SortName&SortOrder=Ascending&Recursive=True&ParentId={library_id}"
|
||||
+ "&isPlayed=false&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
|
||||
+ "&Fields=ItemCounts,ProviderIds,Path&IncludeItemTypes=Series",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
jellyfin_shows = [x for x in jellyfin_search["Items"]]
|
||||
|
||||
for jellyfin_show in jellyfin_shows:
|
||||
show_found = False
|
||||
episode_videos = []
|
||||
|
||||
if generate_locations:
|
||||
if "Path" in jellyfin_show:
|
||||
if (
|
||||
contains_nested(
|
||||
@@ -641,10 +657,8 @@ class Jellyfin:
|
||||
is not None
|
||||
):
|
||||
show_found = True
|
||||
episode_videos = []
|
||||
|
||||
for show, seasons in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
for shows, seasons in videos.items():
|
||||
show = {k: v for k, v in shows}
|
||||
if (
|
||||
contains_nested(
|
||||
jellyfin_show["Path"].split("/")[-1],
|
||||
@@ -656,6 +670,9 @@ class Jellyfin:
|
||||
for episode in season:
|
||||
episode_videos.append(episode)
|
||||
|
||||
break
|
||||
|
||||
if generate_guids:
|
||||
if not show_found:
|
||||
for show_provider_source, show_provider_id in jellyfin_show[
|
||||
"ProviderIds"
|
||||
@@ -668,134 +685,86 @@ class Jellyfin:
|
||||
]
|
||||
):
|
||||
show_found = True
|
||||
episode_videos = []
|
||||
for show, seasons in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
if (
|
||||
show_provider_id.lower()
|
||||
in show[show_provider_source.lower()]
|
||||
if show_provider_id.lower() in show.get(
|
||||
show_provider_source.lower(), []
|
||||
):
|
||||
for season in seasons.values():
|
||||
for episode in season:
|
||||
episode_videos.append(episode)
|
||||
|
||||
break
|
||||
|
||||
if show_found:
|
||||
logger(
|
||||
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show['Name']}",
|
||||
f"Jellyfin: Updating watched for {user_name} in library {library} for show {jellyfin_show.get('Name')}",
|
||||
1,
|
||||
)
|
||||
jellyfin_show_id = jellyfin_show["Id"]
|
||||
jellyfin_episodes = await self.query(
|
||||
jellyfin_episodes = self.query(
|
||||
f"/Shows/{jellyfin_show_id}/Episodes"
|
||||
+ f"?userId={user_id}&Fields=ItemCounts,ProviderIds,MediaSources",
|
||||
"get",
|
||||
session,
|
||||
)
|
||||
|
||||
for jellyfin_episode in jellyfin_episodes["Items"]:
|
||||
episode_status = None
|
||||
|
||||
if "MediaSources" in jellyfin_episode:
|
||||
for episode_location in jellyfin_episode[
|
||||
"MediaSources"
|
||||
]:
|
||||
if (
|
||||
contains_nested(
|
||||
episode_location["Path"].split("/")[-1],
|
||||
videos_episodes_ids["locations"],
|
||||
episode_status = get_video_status(
|
||||
jellyfin_episode, videos_episodes_ids, episode_videos
|
||||
)
|
||||
is not None
|
||||
):
|
||||
for episode in episode_videos:
|
||||
if (
|
||||
contains_nested(
|
||||
episode_location["Path"].split(
|
||||
"/"
|
||||
)[-1],
|
||||
episode["locations"],
|
||||
)
|
||||
is not None
|
||||
):
|
||||
episode_status = episode["status"]
|
||||
break
|
||||
break
|
||||
|
||||
if not episode_status:
|
||||
for (
|
||||
episode_provider_source,
|
||||
episode_provider_id,
|
||||
) in jellyfin_episode["ProviderIds"].items():
|
||||
if (
|
||||
episode_provider_source.lower()
|
||||
in videos_episodes_ids
|
||||
):
|
||||
if (
|
||||
episode_provider_id.lower()
|
||||
in videos_episodes_ids[
|
||||
episode_provider_source.lower()
|
||||
]
|
||||
):
|
||||
for episode in episode_videos:
|
||||
if (
|
||||
episode_provider_id.lower()
|
||||
in episode[
|
||||
episode_provider_source.lower()
|
||||
]
|
||||
):
|
||||
episode_status = episode[
|
||||
"status"
|
||||
]
|
||||
break
|
||||
break
|
||||
|
||||
if episode_status:
|
||||
if episode_status["completed"]:
|
||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||
if episode_status["completed"]:
|
||||
msg = (
|
||||
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['IndexNumber']} {jellyfin_episode['Name']}"
|
||||
+ f" as watched for {user_name} in {library} for Jellyfin"
|
||||
f"Jellyfin: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
|
||||
+ f" as watched for {user_name} in {library}"
|
||||
)
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
await self.query(
|
||||
logger(msg, 5)
|
||||
self.query(
|
||||
f"/Users/{user_id}/PlayedItems/{jellyfin_episode_id}",
|
||||
"post",
|
||||
session,
|
||||
)
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_episode.get("SeriesName"),
|
||||
jellyfin_episode.get("Name"),
|
||||
)
|
||||
else:
|
||||
# TODO add support for partially watched episodes
|
||||
jellyfin_episode_id = jellyfin_episode["Id"]
|
||||
msg = (
|
||||
f"{jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode['IndexNumber']} {jellyfin_episode['Name']}"
|
||||
+ f" as partially watched for {floor(episode_status['time'] / 60_000)} minutes for {user_name} in {library} for Jellyfin"
|
||||
f"Jellyfin: {jellyfin_episode['SeriesName']} {jellyfin_episode['SeasonName']} Episode {jellyfin_episode.get('IndexNumber')} {jellyfin_episode.get('Name')}"
|
||||
+ f" as partially watched for {floor(episode_status['time'] / 60_000)} minutes for {user_name} in {library}"
|
||||
)
|
||||
"""
|
||||
if not dryrun:
|
||||
pass
|
||||
# logger(f"Marked {msg}", 0)
|
||||
else:
|
||||
pass
|
||||
# logger(f"Dryrun {msg}", 0)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping episode {jellyfin_episode['Name']} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping show {jellyfin_show['Name']} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
|
||||
if (
|
||||
not videos_movies_ids
|
||||
and not videos_shows_ids
|
||||
and not videos_episodes_ids
|
||||
):
|
||||
log_marked(
|
||||
user_name,
|
||||
library,
|
||||
jellyfin_episode.get("SeriesName"),
|
||||
jellyfin_episode.get('Name'),
|
||||
duration=floor(episode_status["time"] / 60_000),
|
||||
)"""
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: No videos to mark as watched for {user_name} in library {library}",
|
||||
1,
|
||||
f"Jellyfin: Skipping episode {jellyfin_episode.get('Name')} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Jellyfin: Skipping show {jellyfin_show.get('Name')} as it is not in mark list for {user_name}",
|
||||
3,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
@@ -806,12 +775,10 @@ class Jellyfin:
|
||||
logger(traceback.format_exc(), 2)
|
||||
raise Exception(e)
|
||||
|
||||
async def update_watched(
|
||||
def update_watched(
|
||||
self, watched_list, user_mapping=None, library_mapping=None, dryrun=False
|
||||
):
|
||||
try:
|
||||
tasks = []
|
||||
async with aiohttp.ClientSession() as session:
|
||||
for user, libraries in watched_list.items():
|
||||
logger(f"Jellyfin: Updating for entry {user}, {libraries}", 1)
|
||||
user_other = None
|
||||
@@ -823,7 +790,7 @@ class Jellyfin:
|
||||
user_other = search_mapping(user_mapping, user)
|
||||
|
||||
user_id = None
|
||||
for key in self.users.keys():
|
||||
for key in self.users:
|
||||
if user.lower() == key.lower():
|
||||
user_id = self.users[key]
|
||||
user_name = key
|
||||
@@ -837,8 +804,9 @@ class Jellyfin:
|
||||
logger(f"{user} {user_other} not found in Jellyfin", 2)
|
||||
continue
|
||||
|
||||
jellyfin_libraries = await self.query(
|
||||
f"/Users/{user_id}/Views", "get", session
|
||||
jellyfin_libraries = self.query(
|
||||
f"/Users/{user_id}/Views",
|
||||
"get",
|
||||
)
|
||||
jellyfin_libraries = [x for x in jellyfin_libraries["Items"]]
|
||||
|
||||
@@ -882,12 +850,10 @@ class Jellyfin:
|
||||
continue
|
||||
|
||||
if library_id:
|
||||
task = self.update_user_watched(
|
||||
self.update_user_watched(
|
||||
user_name, user_id, library, library_id, videos, dryrun
|
||||
)
|
||||
tasks.append(task)
|
||||
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
except Exception as e:
|
||||
logger(f"Jellyfin: Error updating watched, {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
@@ -158,7 +158,7 @@ def show_title_dict(user_list: dict):
|
||||
|
||||
return show_output_dict
|
||||
except Exception:
|
||||
logger("Generating show_output_dict failed, skipping", 1)
|
||||
logger("Skipping show_output_dict ", 1)
|
||||
return {}
|
||||
|
||||
|
||||
@@ -168,12 +168,28 @@ def episode_title_dict(user_list: dict):
|
||||
episode_output_dict["completed"] = []
|
||||
episode_output_dict["time"] = []
|
||||
episode_output_dict["locations"] = []
|
||||
episode_output_dict["show"] = []
|
||||
episode_output_dict["season"] = []
|
||||
episode_counter = 0 # Initialize a counter for the current episode position
|
||||
|
||||
# Iterate through the shows, seasons, and episodes in user_list
|
||||
for show in user_list:
|
||||
for season in user_list[show]:
|
||||
for episode in user_list[show][season]:
|
||||
# Add the show title to the episode_output_dict if it doesn't exist
|
||||
if "show" not in episode_output_dict:
|
||||
episode_output_dict["show"] = [None] * episode_counter
|
||||
|
||||
# Add the season number to the episode_output_dict if it doesn't exist
|
||||
if "season" not in episode_output_dict:
|
||||
episode_output_dict["season"] = [None] * episode_counter
|
||||
|
||||
# Add the show title to the episode_output_dict
|
||||
episode_output_dict["show"].append(dict(show))
|
||||
|
||||
# Add the season number to the episode_output_dict
|
||||
episode_output_dict["season"].append(season)
|
||||
|
||||
# Iterate through the keys and values in each episode
|
||||
for episode_key, episode_value in episode.items():
|
||||
# If the key is not "status", add the key to episode_output_dict if it doesn't exist
|
||||
@@ -213,7 +229,7 @@ def episode_title_dict(user_list: dict):
|
||||
|
||||
return episode_output_dict
|
||||
except Exception:
|
||||
logger("Generating episode_output_dict failed, skipping", 1)
|
||||
logger("Skipping episode_output_dict", 1)
|
||||
return {}
|
||||
|
||||
|
||||
@@ -246,7 +262,7 @@ def movies_title_dict(user_list: dict):
|
||||
|
||||
return movies_output_dict
|
||||
except Exception:
|
||||
logger("Generating movies_output_dict failed, skipping", 1)
|
||||
logger("Skipping movies_output_dict failed", 1)
|
||||
return {}
|
||||
|
||||
|
||||
|
||||
48  src/main.py
@@ -1,4 +1,4 @@
|
||||
import os, traceback, json, asyncio
|
||||
import os, traceback, json
|
||||
from dotenv import load_dotenv
|
||||
from time import sleep, perf_counter
|
||||
|
||||
@@ -28,6 +28,8 @@ def setup_users(
|
||||
):
|
||||
server_1_users = generate_user_list(server_1)
|
||||
server_2_users = generate_user_list(server_2)
|
||||
logger(f"Server 1 users: {server_1_users}", 1)
|
||||
logger(f"Server 2 users: {server_2_users}", 1)
|
||||
|
||||
users = combine_user_lists(server_1_users, server_2_users, user_mapping)
|
||||
logger(f"User list that exist on both servers {users}", 1)
|
||||
@@ -83,17 +85,21 @@ def generate_server_connections():
|
||||
)
|
||||
|
||||
for i, url in enumerate(plex_baseurl):
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
Plex(
|
||||
server = Plex(
|
||||
baseurl=url.strip(),
|
||||
token=plex_token[i].strip(),
|
||||
username=None,
|
||||
password=None,
|
||||
servername=None,
|
||||
ssl_bypass=ssl_bypass,
|
||||
),
|
||||
)
|
||||
|
||||
logger(f"Plex Server {i} info: {server.info()}", 3)
|
||||
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
server,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -110,17 +116,20 @@ def generate_server_connections():
|
||||
)
|
||||
|
||||
for i, username in enumerate(plex_username):
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
Plex(
|
||||
server = Plex(
|
||||
baseurl=None,
|
||||
token=None,
|
||||
username=username.strip(),
|
||||
password=plex_password[i].strip(),
|
||||
servername=plex_servername[i].strip(),
|
||||
ssl_bypass=ssl_bypass,
|
||||
),
|
||||
)
|
||||
|
||||
logger(f"Plex Server {i} info: {server.info()}", 3)
|
||||
servers.append(
|
||||
(
|
||||
"plex",
|
||||
server,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -140,10 +149,14 @@ def generate_server_connections():
|
||||
baseurl = baseurl.strip()
|
||||
if baseurl[-1] == "/":
|
||||
baseurl = baseurl[:-1]
|
||||
|
||||
server = Jellyfin(baseurl=baseurl, token=jellyfin_token[i].strip())
|
||||
|
||||
logger(f"Jellyfin Server {i} info: {server.info()}", 3)
|
||||
servers.append(
|
||||
(
|
||||
"jellyfin",
|
||||
Jellyfin(baseurl=baseurl, token=jellyfin_token[i].strip()),
|
||||
server,
|
||||
)
|
||||
)
|
||||
|
||||
@@ -169,8 +182,7 @@ def get_server_watched(
|
||||
library_mapping,
|
||||
)
|
||||
elif server_connection[0] == "jellyfin":
|
||||
return asyncio.run(
|
||||
server_connection[1].get_watched(
|
||||
return server_connection[1].get_watched(
|
||||
users,
|
||||
blacklist_library,
|
||||
whitelist_library,
|
||||
@@ -178,7 +190,6 @@ def get_server_watched(
|
||||
whitelist_library_type,
|
||||
library_mapping,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def update_server_watched(
|
||||
@@ -193,11 +204,9 @@ def update_server_watched(
|
||||
server_watched_filtered, user_mapping, library_mapping, dryrun
|
||||
)
|
||||
elif server_connection[0] == "jellyfin":
|
||||
asyncio.run(
|
||||
server_connection[1].update_watched(
|
||||
server_watched_filtered, user_mapping, library_mapping, dryrun
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def should_sync_server(server_1_type, server_2_type):
|
||||
@@ -304,6 +313,9 @@ def main_loop():
|
||||
|
||||
# Start server_2 at the next server in the list
|
||||
for server_2 in servers[servers.index(server_1) + 1 :]:
|
||||
logger(f"Server 1: {server_1[0].capitalize()}: {server_1[1].info()}", 0)
|
||||
logger(f"Server 2: {server_2[0].capitalize()}: {server_2[1].info()}", 0)
|
||||
|
||||
# Create users list
|
||||
logger("Creating users list", 1)
|
||||
server_1_users, server_2_users = setup_users(
|
||||
@@ -408,5 +420,7 @@ def main():
|
||||
sleep(sleep_duration)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
if len(times) > 0:
|
||||
logger(f"Average time: {sum(times) / len(times)}", 0)
|
||||
logger("Exiting", log_type=0)
|
||||
os._exit(0)
|
||||
|
||||
346  src/plex.py
@@ -1,7 +1,13 @@
|
||||
import re, requests, os, traceback
|
||||
import os, requests, traceback
|
||||
from dotenv import load_dotenv
|
||||
from typing import Dict, Union, FrozenSet
|
||||
|
||||
from urllib3.poolmanager import PoolManager
|
||||
from math import floor
|
||||
|
||||
from requests.adapters import HTTPAdapter as RequestsHTTPAdapter
|
||||
|
||||
from plexapi.video import Show, Episode, Movie
|
||||
from plexapi.server import PlexServer
|
||||
from plexapi.myplex import MyPlexAccount
|
||||
|
||||
@@ -10,6 +16,8 @@ from src.functions import (
|
||||
search_mapping,
|
||||
future_thread_executor,
|
||||
contains_nested,
|
||||
log_marked,
|
||||
str_to_bool,
|
||||
)
|
||||
from src.library import (
|
||||
check_skip_logic,
|
||||
@@ -17,8 +25,14 @@ from src.library import (
|
||||
)
|
||||
|
||||
|
||||
load_dotenv(override=True)
|
||||
|
||||
generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
|
||||
generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))
|
||||
|
||||
|
||||
# Bypass hostname validation for ssl. Taken from https://github.com/pkkid/python-plexapi/issues/143#issuecomment-775485186
|
||||
class HostNameIgnoringAdapter(requests.adapters.HTTPAdapter):
|
||||
class HostNameIgnoringAdapter(RequestsHTTPAdapter):
|
||||
def init_poolmanager(self, connections, maxsize, block=..., **pool_kwargs):
|
||||
self.poolmanager = PoolManager(
|
||||
num_pools=connections,
|
||||
@@ -29,107 +43,94 @@ class HostNameIgnoringAdapter(requests.adapters.HTTPAdapter):
|
||||
)
|
||||
|
||||
|
||||
def get_movie_guids(video, completed=True):
|
||||
logger(f"Plex: {video.title} {video.guids} {video.locations}", 3)
|
||||
def extract_guids_from_item(item: Union[Movie, Show, Episode]) -> Dict[str, str]:
|
||||
# If GENERATE_GUIDS is set to False, then return an empty dict
|
||||
if not generate_guids:
|
||||
return {}
|
||||
|
||||
movie_guids = {}
|
||||
try:
|
||||
for guid in video.guids:
|
||||
# Extract source and id from guid.id
|
||||
m = re.match(r"(.*)://(.*)", guid.id)
|
||||
guid_source, guid_id = m.group(1).lower(), m.group(2)
|
||||
movie_guids[guid_source] = guid_id
|
||||
except Exception:
|
||||
logger(f"Plex: Failed to get guids for {video.title}, Using location only", 1)
|
||||
guids: Dict[str, str] = dict(
|
||||
guid.id.split("://")
|
||||
for guid in item.guids
|
||||
if guid.id is not None and len(guid.id.strip()) > 0
|
||||
)
|
||||
|
||||
movie_guids["title"] = video.title
|
||||
movie_guids["locations"] = tuple([x.split("/")[-1] for x in video.locations])
|
||||
|
||||
movie_guids["status"] = {
|
||||
"completed": completed,
|
||||
"time": video.viewOffset,
|
||||
}
|
||||
|
||||
return movie_guids
|
||||
|
||||
|
||||
def get_episode_guids(episode, show, completed=True):
|
||||
episode_guids_temp = {}
|
||||
try:
|
||||
for guid in episode.guids:
|
||||
# Extract after :// from guid.id
|
||||
m = re.match(r"(.*)://(.*)", guid.id)
|
||||
guid_source, guid_id = m.group(1).lower(), m.group(2)
|
||||
episode_guids_temp[guid_source] = guid_id
|
||||
except Exception:
|
||||
if len(guids) == 0:
|
||||
logger(
|
||||
f"Plex: Failed to get guids for {episode.title} in {show.title}, Using location only",
|
||||
f"Plex: Failed to get any guids for {item.title}",
|
||||
1,
|
||||
)
|
||||
|
||||
episode_guids_temp["title"] = episode.title
|
||||
episode_guids_temp["locations"] = tuple(
|
||||
[x.split("/")[-1] for x in episode.locations]
|
||||
)
|
||||
|
||||
episode_guids_temp["status"] = {
|
||||
"completed": completed,
|
||||
"time": episode.viewOffset,
|
||||
}
|
||||
|
||||
return episode_guids_temp
|
||||
return guids
|
||||
|
||||
|
||||
def get_user_library_watched_show(show):
|
||||
try:
|
||||
show_guids = {}
|
||||
try:
|
||||
for show_guid in show.guids:
|
||||
# Extract source and id from guid.id
|
||||
m = re.match(r"(.*)://(.*)", show_guid.id)
|
||||
show_guid_source, show_guid_id = m.group(1).lower(), m.group(2)
|
||||
show_guids[show_guid_source] = show_guid_id
|
||||
except Exception:
|
||||
def get_guids(item: Union[Movie, Episode], completed=True):
|
||||
if not item.locations:
|
||||
logger(
|
||||
f"Plex: Failed to get guids for {show.title}, Using location only", 1
|
||||
f"Plex: {item.title} has no locations",
|
||||
1,
|
||||
)
|
||||
|
||||
show_guids["title"] = show.title
|
||||
show_guids["locations"] = tuple([x.split("/")[-1] for x in show.locations])
|
||||
show_guids = frozenset(show_guids.items())
|
||||
if not item.guids:
|
||||
logger(
|
||||
f"Plex: {item.title} has no guids",
|
||||
1,
|
||||
)
|
||||
|
||||
return {
|
||||
"title": item.title,
|
||||
"locations": (
|
||||
tuple([location.split("/")[-1] for location in item.locations])
|
||||
if generate_locations
|
||||
else tuple()
|
||||
),
|
||||
"status": {
|
||||
"completed": completed,
|
||||
"time": item.viewOffset,
|
||||
},
|
||||
} | extract_guids_from_item(
|
||||
item
|
||||
) # Merge the metadata and guid dictionaries
|
||||
|
||||
|
||||
def get_user_library_watched_show(show, process_episodes, threads=None):
|
||||
try:
|
||||
show_guids: FrozenSet = frozenset(
|
||||
(
|
||||
{
|
||||
"title": show.title,
|
||||
"locations": (
|
||||
tuple([location.split("/")[-1] for location in show.locations])
|
||||
if generate_locations
|
||||
else tuple()
|
||||
),
|
||||
}
|
||||
| extract_guids_from_item(show)
|
||||
).items() # Merge the metadata and guid dictionaries
|
||||
)
|
||||
|
||||
episode_guids_args = []
|
||||
|
||||
for episode in process_episodes:
|
||||
episode_guids_args.append([get_guids, episode, episode.isWatched])
|
||||
|
||||
episode_guids_results = future_thread_executor(
|
||||
episode_guids_args, threads=threads
|
||||
)
|
||||
|
||||
# Get all watched episodes for show
|
||||
episode_guids = {}
|
||||
watched = show.watched()
|
||||
|
||||
for episode in show.episodes():
|
||||
if episode in watched:
|
||||
if episode.parentTitle not in episode_guids:
|
||||
episode_guids[episode.parentTitle] = []
|
||||
|
||||
episode_guids[episode.parentTitle].append(
|
||||
get_episode_guids(episode, show, completed=True)
|
||||
)
|
||||
elif episode.viewOffset > 0:
|
||||
if episode.parentTitle not in episode_guids:
|
||||
episode_guids[episode.parentTitle] = []
|
||||
|
||||
episode_guids[episode.parentTitle].append(
|
||||
get_episode_guids(episode, show, completed=False)
|
||||
)
|
||||
for index, episode in enumerate(process_episodes):
|
||||
if episode.parentIndex not in episode_guids:
|
||||
episode_guids[episode.parentIndex] = []
|
||||
episode_guids[episode.parentIndex].append(episode_guids_results[index])
|
||||
|
||||
return show_guids, episode_guids
|
||||
|
||||
except Exception:
|
||||
return {}, {}
|
||||
|
||||
|
||||
def get_user_library_watched(user, user_plex, library):
|
||||
user_name: str = user.username.lower() if user.username else user.title.lower()
|
||||
try:
|
||||
user_name = user.username.lower() if user.username else user.title.lower()
|
||||
user_watched = {}
|
||||
user_watched[user_name] = {}
|
||||
|
||||
logger(
|
||||
f"Plex: Generating watched for {user_name} in library {library.title}",
|
||||
0,
|
||||
@@ -138,60 +139,66 @@ def get_user_library_watched(user, user_plex, library):
|
||||
library_videos = user_plex.library.section(library.title)
|
||||
|
||||
if library.type == "movie":
|
||||
user_watched[user_name][library.title] = []
|
||||
watched = []
|
||||
|
||||
# Get all watched movies
|
||||
for video in library_videos.search(unwatched=False):
|
||||
logger(f"Plex: Adding {video.title} to {user_name} watched list", 3)
|
||||
|
||||
movie_guids = get_movie_guids(video, completed=True)
|
||||
|
||||
user_watched[user_name][library.title].append(movie_guids)
|
||||
|
||||
# Get all partially watched movies greater than 1 minute
|
||||
for video in library_videos.search(inProgress=True):
|
||||
if video.viewOffset < 60000:
|
||||
continue
|
||||
|
||||
logger(f"Plex: Adding {video.title} to {user_name} watched list", 3)
|
||||
|
||||
movie_guids = get_movie_guids(video, completed=False)
|
||||
|
||||
user_watched[user_name][library.title].append(movie_guids)
|
||||
args = [
|
||||
[get_guids, video, video.isWatched]
|
||||
for video in library_videos.search(unwatched=False)
|
||||
+ library_videos.search(inProgress=True)
|
||||
if video.isWatched or video.viewOffset >= 60000
|
||||
]
|
||||
|
||||
for guid in future_thread_executor(args, threads=len(args)):
|
||||
logger(f"Plex: Adding {guid['title']} to {user_name} watched list", 3)
|
||||
watched.append(guid)
|
||||
elif library.type == "show":
|
||||
user_watched[user_name][library.title] = {}
|
||||
watched = {}
|
||||
|
||||
# Parallelize show processing
|
||||
args = []
|
||||
# Get all watched shows and partially watched shows
|
||||
parallel_show_task = []
|
||||
parallel_episodes_task = []
|
||||
|
||||
# Get all watched shows
|
||||
for show in library_videos.search(unwatched=False):
|
||||
args.append([get_user_library_watched_show, show])
|
||||
for show in library_videos.search(unwatched=False) + library_videos.search(
|
||||
inProgress=True
|
||||
):
|
||||
process_episodes = []
|
||||
for episode in show.episodes():
|
||||
if episode.isWatched or episode.viewOffset >= 60000:
|
||||
process_episodes.append(episode)
|
||||
|
||||
# Get all partially watched shows
|
||||
for show in library_videos.search(inProgress=True):
|
||||
args.append([get_user_library_watched_show, show])
|
||||
# Shows with more than 24 episodes has its episodes processed in parallel
|
||||
# Shows with less than 24 episodes has its episodes processed in serial but the shows are processed in parallel
|
||||
if len(process_episodes) >= 24:
|
||||
parallel_episodes_task.append(
|
||||
[
|
||||
get_user_library_watched_show,
|
||||
show,
|
||||
process_episodes,
|
||||
len(process_episodes),
|
||||
]
|
||||
)
|
||||
else:
|
||||
parallel_show_task.append(
|
||||
[get_user_library_watched_show, show, process_episodes, 1]
|
||||
)
|
||||
|
||||
for show_guids, episode_guids in future_thread_executor(
|
||||
args, workers=min(os.cpu_count(), 4)
|
||||
):
|
||||
parallel_show_task, threads=len(parallel_show_task)
|
||||
) + future_thread_executor(parallel_episodes_task, threads=1):
|
||||
if show_guids and episode_guids:
|
||||
# append show, season, episode
|
||||
if show_guids not in user_watched[user_name][library.title]:
|
||||
user_watched[user_name][library.title][show_guids] = {}
|
||||
|
||||
user_watched[user_name][library.title][show_guids] = episode_guids
|
||||
watched[show_guids] = episode_guids
|
||||
logger(
|
||||
f"Plex: Added {episode_guids} to {user_name} {show_guids} watched list",
|
||||
3,
|
||||
)
|
||||
|
||||
logger(f"Plex: Got watched for {user_name} in library {library.title}", 1)
|
||||
if library.title in user_watched[user_name]:
|
||||
logger(f"Plex: {user_watched[user_name][library.title]}", 3)
|
||||
else:
|
||||
watched = None
|
||||
|
||||
return user_watched
|
||||
logger(f"Plex: Got watched for {user_name} in library {library.title}", 1)
|
||||
logger(f"Plex: {watched}", 3)
|
||||
|
||||
return {user_name: {library.title: watched} if watched is not None else {}}
|
||||
except Exception as e:
|
||||
logger(
|
||||
f"Plex: Failed to get watched for {user_name} in library {library.title}, Error: {e}",
|
||||
@@ -202,6 +209,10 @@ def get_user_library_watched(user, user_plex, library):
|
||||
|
||||
def find_video(plex_search, video_ids, videos=None):
|
||||
try:
|
||||
if not generate_guids and not generate_locations:
|
||||
return False, []
|
||||
|
||||
if generate_locations:
|
||||
for location in plex_search.locations:
|
||||
if (
|
||||
contains_nested(location.split("/")[-1], video_ids["locations"])
|
||||
@@ -212,7 +223,9 @@ def find_video(plex_search, video_ids, videos=None):
|
||||
for show, seasons in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
if (
|
||||
contains_nested(location.split("/")[-1], show["locations"])
|
||||
contains_nested(
|
||||
location.split("/")[-1], show["locations"]
|
||||
)
|
||||
is not None
|
||||
):
|
||||
for season in seasons.values():
|
||||
@@ -221,9 +234,9 @@ def find_video(plex_search, video_ids, videos=None):
|
||||
|
||||
return True, episode_videos
|
||||
|
||||
if generate_guids:
|
||||
for guid in plex_search.guids:
|
||||
guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
|
||||
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||
guid_source, guid_id = guid.id.split("://")
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if guid_source in video_ids.keys():
|
||||
@@ -232,9 +245,9 @@ def find_video(plex_search, video_ids, videos=None):
|
||||
if videos:
|
||||
for show, seasons in videos.items():
|
||||
show = {k: v for k, v in show}
|
||||
if guid_source in show["ids"].keys():
|
||||
if guid_id in show["ids"][guid_source]:
|
||||
for season in seasons:
|
||||
if guid_source in show.keys():
|
||||
if guid_id == show[guid_source]:
|
||||
for season in seasons.values():
|
||||
for episode in season:
|
||||
episode_videos.append(episode)
|
||||
|
||||
@@ -247,6 +260,10 @@ def find_video(plex_search, video_ids, videos=None):
|
||||
|
||||
def get_video_status(plex_search, video_ids, videos):
|
||||
try:
|
||||
if not generate_guids and not generate_locations:
|
||||
return None
|
||||
|
||||
if generate_locations:
|
||||
for location in plex_search.locations:
|
||||
if (
|
||||
contains_nested(location.split("/")[-1], video_ids["locations"])
|
||||
@@ -259,16 +276,16 @@ def get_video_status(plex_search, video_ids, videos):
|
||||
):
|
||||
return video["status"]
|
||||
|
||||
if generate_guids:
|
||||
for guid in plex_search.guids:
|
||||
guid_source = re.search(r"(.*)://", guid.id).group(1).lower()
|
||||
guid_id = re.search(r"://(.*)", guid.id).group(1)
|
||||
guid_source, guid_id = guid.id.split("://")
|
||||
|
||||
# If show provider source and show provider id are in videos_shows_ids exactly, then the show is in the list
|
||||
if guid_source in video_ids.keys():
|
||||
if guid_id in video_ids[guid_source]:
|
||||
for video in videos:
|
||||
if guid_source in video["ids"].keys():
|
||||
if guid_id in video["ids"][guid_source]:
|
||||
if guid_source in video.keys():
|
||||
if guid_id == video[guid_source]:
|
||||
return video["status"]
|
||||
|
||||
return None
|
||||
@@ -297,19 +314,28 @@ def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
)
|
||||
if video_status:
|
||||
if video_status["completed"]:
|
||||
msg = f"{movies_search.title} as watched for {user.title} in {library} for Plex"
|
||||
msg = f"Plex: {movies_search.title} as watched for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
logger(msg, 5)
|
||||
movies_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(user.title, library, movies_search.title, None, None)
|
||||
elif video_status["time"] > 60_000:
|
||||
msg = f"{movies_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library} for Plex"
|
||||
msg = f"Plex: {movies_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
movies_search.updateProgress(video_status["time"])
|
||||
logger(msg, 5)
|
||||
movies_search.updateTimeline(video_status["time"])
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
user.title,
|
||||
library,
|
||||
movies_search.title,
|
||||
duration=video_status["time"],
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping movie {movies_search.title} as it is not in mark list for {user.title}",
|
||||
@@ -328,19 +354,34 @@ def update_user_watched(user, user_plex, library, videos, dryrun):
|
||||
)
|
||||
if video_status:
|
||||
if video_status["completed"]:
|
||||
msg = f"{show_search.title} {episode_search.title} as watched for {user.title} in {library} for Plex"
|
||||
msg = f"Plex: {show_search.title} {episode_search.title} as watched for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
logger(msg, 5)
|
||||
episode_search.markWatched()
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
user.title,
|
||||
library,
|
||||
show_search.title,
|
||||
episode_search.title,
|
||||
)
|
||||
else:
|
||||
msg = f"{show_search.title} {episode_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library} for Plex"
|
||||
msg = f"Plex: {show_search.title} {episode_search.title} as partially watched for {floor(video_status['time'] / 60_000)} minutes for {user.title} in {library}"
|
||||
if not dryrun:
|
||||
logger(f"Marked {msg}", 0)
|
||||
episode_search.updateProgress(video_status["time"])
|
||||
logger(msg, 5)
|
||||
episode_search.updateTimeline(video_status["time"])
|
||||
else:
|
||||
logger(f"Dryrun {msg}", 0)
|
||||
logger(msg, 6)
|
||||
|
||||
log_marked(
|
||||
user.title,
|
||||
library,
|
||||
show_search.title,
|
||||
episode_search.title,
|
||||
video_status["time"],
|
||||
)
|
||||
else:
|
||||
logger(
|
||||
f"Plex: Skipping episode {episode_search.title} as it is not in mark list for {user.title}",
|
||||
@@ -414,6 +455,9 @@ class Plex:
|
||||
logger(f"Plex: Failed to login, Error: {e}", 2)
|
||||
raise Exception(e)
|
||||
|
||||
def info(self) -> str:
|
||||
return f"{self.plex.friendlyName}: {self.plex.version}"
|
||||
|
||||
def get_users(self):
|
||||
try:
|
||||
users = self.plex.myPlexAccount().users()
|
||||
@@ -438,7 +482,6 @@ class Plex:
|
||||
try:
|
||||
# Get all libraries
|
||||
users_watched = {}
|
||||
args = []
|
||||
|
||||
for user in users:
|
||||
if self.admin_user == user:
|
||||
@@ -480,13 +523,12 @@ class Plex:
|
||||
)
|
||||
continue
|
||||
|
||||
args.append([get_user_library_watched, user, user_plex, library])
|
||||
user_watched = get_user_library_watched(user, user_plex, library)
|
||||
|
||||
for user_watched in future_thread_executor(args):
|
||||
for user, user_watched_temp in user_watched.items():
|
||||
if user not in users_watched:
|
||||
users_watched[user] = {}
|
||||
users_watched[user].update(user_watched_temp)
|
||||
for user_watched, user_watched_temp in user_watched.items():
|
||||
if user_watched not in users_watched:
|
||||
users_watched[user_watched] = {}
|
||||
users_watched[user_watched].update(user_watched_temp)
|
||||
|
||||
return users_watched
|
||||
except Exception as e:
|
||||
|
||||
@@ -6,11 +6,17 @@ from src.library import generate_library_guids_dict
|
||||
|
||||
|
||||
def combine_watched_dicts(dicts: list):
|
||||
# Ensure that the input is a list of dictionaries
|
||||
if not all(isinstance(d, dict) for d in dicts):
|
||||
raise ValueError("Input must be a list of dictionaries")
|
||||
|
||||
combined_dict = {}
|
||||
|
||||
for single_dict in dicts:
|
||||
for key, value in single_dict.items():
|
||||
if key not in combined_dict:
|
||||
combined_dict[key] = {}
|
||||
|
||||
for subkey, subvalue in value.items():
|
||||
if subkey in combined_dict[key]:
|
||||
# If the subkey already exists in the combined dictionary,
|
||||
@@ -117,11 +123,18 @@ def cleanup_watched(
|
||||
show_key_dict = dict(show_key_1)
|
||||
|
||||
for season in watched_list_1[user_1][library_1][show_key_1]:
|
||||
# Filter the episode_watched_list_2_keys_dict dictionary to handle cases
|
||||
# where episode location names are not unique such as S01E01.mkv
|
||||
filtered_episode_watched_list_2_keys_dict = (
|
||||
filter_episode_watched_list_2_keys_dict(
|
||||
episode_watched_list_2_keys_dict, show_key_dict, season
|
||||
)
|
||||
)
|
||||
for episode in watched_list_1[user_1][library_1][show_key_1][
|
||||
season
|
||||
]:
|
||||
episode_index = get_episode_index_in_dict(
|
||||
episode, episode_watched_list_2_keys_dict
|
||||
episode, filtered_episode_watched_list_2_keys_dict
|
||||
)
|
||||
if episode_index is not None:
|
||||
if check_remove_entry(
|
||||
@@ -217,6 +230,70 @@ def get_movie_index_in_dict(movie, movies_watched_list_2_keys_dict):
|
||||
return None
|
||||
|
||||
|
||||
def filter_episode_watched_list_2_keys_dict(
|
||||
episode_watched_list_2_keys_dict, show_key_dict, season
|
||||
):
|
||||
# If the episode_watched_list_2_keys_dict dictionary is empty, missing season or show then return an empty dictionary
|
||||
if (
|
||||
len(episode_watched_list_2_keys_dict) == 0
|
||||
or "season" not in episode_watched_list_2_keys_dict.keys()
|
||||
or "show" not in episode_watched_list_2_keys_dict.keys()
|
||||
):
|
||||
return {}
|
||||
|
||||
# Filter the episode_watched_list_2_keys_dict dictionary to only include values for the correct show and season
|
||||
filtered_episode_watched_list_2_keys_dict = {}
|
||||
show_indecies = []
|
||||
season_indecies = []
|
||||
|
||||
# Iterate through episode_watched_list_2_keys_dict["season"] and find the indecies that match season
|
||||
for season_index, season_value in enumerate(
|
||||
episode_watched_list_2_keys_dict.get("season")
|
||||
):
|
||||
if season_value == season:
|
||||
season_indecies.append(season_index)
|
||||
|
||||
# Iterate through episode_watched_list_2_keys_dict["show"] and find the indecies that match show_key_dict
|
||||
for show_index, show_value in enumerate(episode_watched_list_2_keys_dict["show"]):
|
||||
# Iterate through the keys and values of the show_value dictionary and check if they match show_key_dict
|
||||
for show_key, show_key_value in show_value.items():
|
||||
if show_key == "locations":
|
||||
# Iterate through the locations in the show_value dictionary
|
||||
for location in show_key_value:
|
||||
# If the location is in the episode_watched_list_2_keys_dict dictionary, return index of the key
|
||||
if (
|
||||
contains_nested(location, show_key_dict["locations"])
|
||||
is not None
|
||||
):
|
||||
show_indecies.append(show_index)
|
||||
break
|
||||
else:
|
||||
if show_key in show_key_dict.keys():
|
||||
if show_key_value == show_key_dict[show_key]:
|
||||
show_indecies.append(show_index)
|
||||
break
|
||||
|
||||
# Find the intersection of the show_indecies and season_indecies lists
|
||||
indecies = list(set(show_indecies) & set(season_indecies))
|
||||
|
||||
# If there are no indecies that match the show and season, return an empty dictionary
|
||||
if len(indecies) == 0:
|
||||
return {}
|
||||
|
||||
# Create a copy of the dictionary with indecies that match the show and season and none that don't
|
||||
for key, value in episode_watched_list_2_keys_dict.items():
|
||||
if key not in filtered_episode_watched_list_2_keys_dict:
|
||||
filtered_episode_watched_list_2_keys_dict[key] = []
|
||||
|
||||
for index, _ in enumerate(value):
|
||||
if index in indecies:
|
||||
filtered_episode_watched_list_2_keys_dict[key].append(value[index])
|
||||
else:
|
||||
filtered_episode_watched_list_2_keys_dict[key].append(None)
|
||||
|
||||
return filtered_episode_watched_list_2_keys_dict
|
||||
|
||||
|
||||
def get_episode_index_in_dict(episode, episode_watched_list_2_keys_dict):
|
||||
# Iterate through the keys and values of the episode dictionary
|
||||
for episode_key, episode_value in episode.items():
|
||||
|
||||
96  test/ci1.env (new file)
@@ -0,0 +1,96 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played; instead just output to the log what would have been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "False"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"
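
The src/plex.py hunk earlier in this comparison reads both of these flags at import time with str_to_bool(os.getenv(...)). A minimal sketch of that pattern (not part of the .env file), with str_to_bool written out locally here purely for illustration; the project imports its own helper from src.functions:

    import os
    from dotenv import load_dotenv

    def str_to_bool(value: str) -> bool:
        # Illustrative stand-in for the helper imported from src.functions in the diff.
        return value.strip().lower() in ("true", "1", "yes")

    load_dotenv(override=True)

    # Mirrors the assignments shown in the src/plex.py hunk of this diff.
    generate_guids = str_to_bool(os.getenv("GENERATE_GUIDS", "True"))
    generate_locations = str_to_bool(os.getenv("GENERATE_LOCATIONS", "True"))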
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }
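
USER_MAPPING and LIBRARY_MAPPING are written as JSON-style dictionaries. How the project actually parses them is not shown in this diff, so the following is only a hedged sketch (not part of the .env file), assuming the values are valid JSON:

    import json
    import os

    # Assumption: the env values are valid JSON objects, as in the samples above.
    user_mapping = json.loads(os.getenv("USER_MAPPING", "{}"))
    library_mapping = json.loads(os.getenv("LIBRARY_MAPPING", "{}"))
    print(user_mapping.get("JellyUser"))  # -> "jellyplex_watched"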
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use a token, as connecting directly to the server is faster than going through the Plex servers
## URL of the Plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
|
||||
PLEX_BASEURL = "https://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
|
||||
PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
||||
|
||||
## If not using a Plex token, use the username and password of the server admin along with the servername
## Comma separated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
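
When SSL_BYPASS is enabled, the src/plex.py hunk in this diff mounts a HostNameIgnoringAdapter (subclassing requests' HTTPAdapter) whose init_poolmanager builds a urllib3 PoolManager. A hedged sketch (not part of the .env file) of that well-known pattern from the python-plexapi issue referenced in the diff; assert_hostname=False is an assumption here, not copied from the repo:

    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.poolmanager import PoolManager

    class HostNameIgnoringAdapter(HTTPAdapter):
        def init_poolmanager(self, connections, maxsize, block=False, **pool_kwargs):
            # Build the pool ourselves so hostname verification can be relaxed.
            # assert_hostname=False is an assumption based on the linked plexapi issue.
            self.poolmanager = PoolManager(
                num_pools=connections,
                maxsize=maxsize,
                block=block,
                assert_hostname=False,
                **pool_kwargs,
            )

    session = requests.Session()
    session.mount("https://", HostNameIgnoringAdapter())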
|
||||
|
||||
## Control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will apply updates from Plex
## to Jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple Plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
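
These four switches gate the direction of every sync; src/main.py in this diff declares should_sync_server(server_1_type, server_2_type) for exactly that purpose. Its body is not shown in this comparison, so the following is only an illustrative sketch (not part of the .env file) of how such a check could map the server-type pair onto the matching flag:

    import os

    def _flag(name: str) -> bool:
        # Illustrative helper: interpret a "True"/"False" string flag from the environment.
        return os.getenv(name, "True").strip().lower() == "true"

    def should_sync_server(server_1_type: str, server_2_type: str) -> bool:
        # Illustrative only; the real body of should_sync_server is not part of this hunk.
        flag_name = f"SYNC_FROM_{server_1_type.upper()}_TO_{server_2_type.upper()}"
        return _flag(flag_name)

    # Example: honours SYNC_FROM_PLEX_TO_JELLYFIN from this file.
    print(should_sync_server("plex", "jellyfin"))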
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin API token, created manually by logging in to the Jellyfin server admin dashboard and creating an API key
## Comma separated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
96  test/ci2.env (new file)
@@ -0,0 +1,96 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played; instead just output to the log what would have been marked.
|
||||
DRYRUN = "True"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "False"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use a token, as connecting directly to the server is faster than going through the Plex servers
## URL of the Plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
|
||||
PLEX_BASEURL = "https://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
|
||||
PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
||||
|
||||
## If not using a Plex token, use the username and password of the server admin along with the servername
## Comma separated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
## Control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will apply updates from Plex
## to Jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple Plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin API token, created manually by logging in to the Jellyfin server admin dashboard and creating an API key
## Comma separated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
96  test/ci3.env (new file)
@@ -0,0 +1,96 @@
|
||||
# Global Settings
|
||||
|
||||
## Do not mark any shows/movies as played; instead just output to the log what would have been marked.
|
||||
DRYRUN = "False"
|
||||
|
||||
## Additional logging information
|
||||
DEBUG = "True"
|
||||
|
||||
## Debugging level, "info" is default, "debug" is more verbose
|
||||
DEBUG_LEVEL = "debug"
|
||||
|
||||
## If set to true then the script will only run once and then exit
|
||||
RUN_ONLY_ONCE = "True"
|
||||
|
||||
## How often to run the script in seconds
|
||||
SLEEP_DURATION = 10
|
||||
|
||||
## Log file where all output will be written to
|
||||
LOG_FILE = "log.log"
|
||||
|
||||
## Mark file where all shows/movies that have been marked as played will be written to
|
||||
MARK_FILE = "mark.log"
|
||||
|
||||
## Timeout for requests for jellyfin
|
||||
REQUEST_TIMEOUT = 300
|
||||
|
||||
## Max threads for processing
|
||||
MAX_THREADS = 2
|
||||
|
||||
## Generate guids
## Generating guids is a slow process, so this is a way to speed up the process
## by using the location only, useful when using the same files on multiple servers
GENERATE_GUIDS = "True"

## Generate locations
## Generating locations is a slow process, so this is a way to speed up the process
## by using the guid only, useful when using different files on multiple servers
GENERATE_LOCATIONS = "True"
|
||||
|
||||
## Map usernames between servers in the event that they are different, order does not matter
## Comma separated for multiple options
USER_MAPPING = {"JellyUser":"jellyplex_watched"}

## Map libraries between servers in the event that they are different, order does not matter
## Comma separated for multiple options
LIBRARY_MAPPING = { "Shows": "TV Shows" }
|
||||
|
||||
|
||||
## Blacklisting/Whitelisting libraries, library types such as Movies/TV Shows, and users. Mappings apply, so if a mapping for the user or library exists then both will be excluded.
## Comma separated for multiple options
|
||||
#BLACKLIST_LIBRARY = ""
|
||||
#WHITELIST_LIBRARY = "Movies"
|
||||
#BLACKLIST_LIBRARY_TYPE = "Series"
|
||||
#WHITELIST_LIBRARY_TYPE = "Movies, movie"
|
||||
#BLACKLIST_USERS = ""
|
||||
WHITELIST_USERS = "jellyplex_watched"
|
||||
|
||||
|
||||
|
||||
# Plex
|
||||
|
||||
## Recommended to use a token, as connecting directly to the server is faster than going through the Plex servers
## URL of the Plex server, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
|
||||
PLEX_BASEURL = "https://localhost:32400"
|
||||
|
||||
## Plex token https://support.plex.tv/articles/204059436-finding-an-authentication-token-x-plex-token/
## Comma separated list for multiple servers
|
||||
PLEX_TOKEN = "mVaCzSyd78uoWkCBzZ_Y"
|
||||
|
||||
## If not using a Plex token, use the username and password of the server admin along with the servername
## Comma separated for multiple options
|
||||
#PLEX_USERNAME = "PlexUser, PlexUser2"
|
||||
#PLEX_PASSWORD = "SuperSecret, SuperSecret2"
|
||||
#PLEX_SERVERNAME = "Plex Server1, Plex Server2"
|
||||
|
||||
## Skip hostname validation for ssl certificates.
|
||||
## Set to True if running into ssl certificate errors
|
||||
SSL_BYPASS = "True"
|
||||
|
||||
## Control the direction of syncing. e.g. SYNC_FROM_PLEX_TO_JELLYFIN set to true will apply updates from Plex
## to Jellyfin. SYNC_FROM_PLEX_TO_PLEX set to true will sync updates between multiple Plex servers
|
||||
SYNC_FROM_PLEX_TO_JELLYFIN = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_PLEX = "True"
|
||||
SYNC_FROM_PLEX_TO_PLEX = "True"
|
||||
SYNC_FROM_JELLYFIN_TO_JELLYFIN = "True"
|
||||
|
||||
# Jellyfin
|
||||
|
||||
## Jellyfin server URL, use hostname or IP address if the hostname is not resolving correctly
## Comma separated list for multiple servers
|
||||
JELLYFIN_BASEURL = "http://localhost:8096"
|
||||
|
||||
## Jellyfin API token, created manually by logging in to the Jellyfin server admin dashboard and creating an API key
## Comma separated list for multiple servers
|
||||
JELLYFIN_TOKEN = "d773c4db3ecc4b028fc0904d9694804c"
|
||||
@@ -83,6 +83,16 @@ episode_titles = {
|
||||
"tvdb": ["8444132"],
|
||||
"completed": [True],
|
||||
"time": [0],
|
||||
"season": ["Season 1"],
|
||||
"show": [
|
||||
{
|
||||
"imdb": "tt3581920",
|
||||
"locations": ("The Last of Us",),
|
||||
"title": "The Last of Us",
|
||||
"tmdb": "100088",
|
||||
"tvdb": "392256",
|
||||
}
|
||||
],
|
||||
}
|
||||
movie_titles = {
|
||||
"imdb": ["tt2380307"],
|
||||
|
||||
@@ -18,102 +18,225 @@ from src.watched import cleanup_watched, combine_watched_dicts
|
||||
tv_shows_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
|
||||
("imdb", "tt0436992"),
|
||||
("tmdb", "57243"),
|
||||
("tvdb", "78804"),
|
||||
("title", "Doctor Who (2005)"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"title": "Extreme Aggressor",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
"imdb": "tt0563001",
|
||||
"tmdb": "968589",
|
||||
"tvdb": "295296",
|
||||
"title": "The Unquiet Dead",
|
||||
"locations": ("S01E03.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"imdb": "tt0550487",
|
||||
"tmdb": "282861",
|
||||
"tvdb": "300385",
|
||||
"title": "Compulsion",
|
||||
"locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
|
||||
"imdb": "tt0562985",
|
||||
"tmdb": "968590",
|
||||
"tvdb": "295297",
|
||||
"title": "Aliens of London (1)",
|
||||
"locations": ("S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 240000},
|
||||
},
|
||||
{
|
||||
"imdb": "tt0563003",
|
||||
"tmdb": "968592",
|
||||
"tvdb": "295298",
|
||||
"title": "World War Three (2)",
|
||||
"locations": ("S01E05.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
frozenset(
|
||||
{
|
||||
"title": "S01E01",
|
||||
"locations": ("Test S01E01.mkv",),
|
||||
("title", "Monarch: Legacy of Monsters"),
|
||||
("imdb", "tt17220216"),
|
||||
("tvdb", "422598"),
|
||||
("tmdb", "202411"),
|
||||
(
|
||||
"locations",
|
||||
("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
|
||||
),
|
||||
}
|
||||
): {
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt21255044",
|
||||
"tmdb": "4661246",
|
||||
"tvdb": "10009418",
|
||||
"title": "Secrets and Lies",
|
||||
"locations": ("S01E03.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"title": "S01E02",
|
||||
"locations": ("Test S01E02.mkv",),
|
||||
"imdb": "tt21255050",
|
||||
"tmdb": "4712059",
|
||||
"tvdb": "10009419",
|
||||
"title": "Parallels and Interiors",
|
||||
"locations": ("S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 240000},
|
||||
},
|
||||
{
|
||||
"imdb": "tt23787572",
|
||||
"tmdb": "4712061",
|
||||
"tvdb": "10009420",
|
||||
"title": "The Way Out",
|
||||
"locations": ("S01E05.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset(
|
||||
{
|
||||
("tmdb", "125928"),
|
||||
("imdb", "tt14681924"),
|
||||
("tvdb", "403172"),
|
||||
(
|
||||
"locations",
|
||||
("My Adventures with Superman {tvdb-403172} {imdb-tt14681924}",),
|
||||
),
|
||||
("title", "My Adventures with Superman"),
|
||||
}
|
||||
): {
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt15699926",
|
||||
"tmdb": "3070048",
|
||||
"tvdb": "8438181",
|
||||
"title": "Adventures of a Normal Man (1)",
|
||||
"locations": ("S01E01.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"title": "S01E04",
|
||||
"locations": ("Test S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 5},
|
||||
"imdb": "tt20413322",
|
||||
"tmdb": "4568681",
|
||||
"tvdb": "9829910",
|
||||
"title": "Adventures of a Normal Man (2)",
|
||||
"locations": ("S01E02.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"imdb": "tt20413328",
|
||||
"tmdb": "4497012",
|
||||
"tvdb": "9870382",
|
||||
"title": "My Interview with Superman",
|
||||
"locations": ("S01E03.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
]
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
tv_shows_watched_list_2 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
|
||||
("imdb", "tt0436992"),
|
||||
("tmdb", "57243"),
|
||||
("title", "Doctor Who"),
|
||||
("tvdb", "78804"),
|
||||
("tvrage", "3332"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt0550487",
|
||||
"tmdb": "282861",
|
||||
"tvdb": "300385",
|
||||
"title": "Compulsion",
|
||||
"locations": ("Criminal Minds S01E02 Compulsion WEBDL-720p.mkv",),
|
||||
"tvdb": "295294",
|
||||
"imdb": "tt0562992",
|
||||
"title": "Rose",
|
||||
"locations": ("S01E01.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"imdb": "tt0550498",
|
||||
"tmdb": "282865",
|
||||
"tvdb": "300474",
|
||||
"title": "Won't Get Fooled Again",
|
||||
"locations": (
|
||||
"Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
|
||||
),
|
||||
"tvdb": "295295",
|
||||
"imdb": "tt0562997",
|
||||
"title": "The End of the World",
|
||||
"locations": ("S01E02.mkv",),
|
||||
"status": {"completed": False, "time": 300670},
|
||||
},
|
||||
{
|
||||
"tvdb": "295298",
|
||||
"imdb": "tt0563003",
|
||||
"title": "World War Three (2)",
|
||||
"locations": ("S01E05.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
frozenset(
|
||||
{
|
||||
"title": "S01E02",
|
||||
"locations": ("Test S01E02.mkv",),
|
||||
"status": {"completed": False, "time": 10},
|
||||
},
|
||||
("title", "Monarch: Legacy of Monsters"),
|
||||
("imdb", "tt17220216"),
|
||||
("tvdb", "422598"),
|
||||
("tmdb", "202411"),
|
||||
(
|
||||
"locations",
|
||||
("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
|
||||
),
|
||||
}
|
||||
): {
|
||||
1: [
|
||||
{
|
||||
"title": "S01E03",
|
||||
"locations": ("Test S01E03.mkv",),
|
||||
"tvdb": "9959300",
|
||||
"imdb": "tt20412166",
|
||||
"title": "Aftermath",
|
||||
"locations": ("S01E01.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"title": "S01E04",
|
||||
"locations": ("Test S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 10},
|
||||
"tvdb": "10009417",
|
||||
"imdb": "tt22866594",
|
||||
"title": "Departure",
|
||||
"locations": ("S01E02.mkv",),
|
||||
"status": {"completed": False, "time": 300741},
|
||||
},
|
||||
{
|
||||
"tvdb": "10009420",
|
||||
"imdb": "tt23787572",
|
||||
"title": "The Way Out",
|
||||
"locations": ("S01E05.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset(
|
||||
{
|
||||
("tmdb", "125928"),
|
||||
("imdb", "tt14681924"),
|
||||
("tvdb", "403172"),
|
||||
(
|
||||
"locations",
|
||||
("My Adventures with Superman {tvdb-403172} {imdb-tt14681924}",),
|
||||
),
|
||||
("title", "My Adventures with Superman"),
|
||||
}
|
||||
): {
|
||||
1: [
|
||||
{
|
||||
"tvdb": "8438181",
|
||||
"imdb": "tt15699926",
|
||||
"title": "Adventures of a Normal Man (1)",
|
||||
"locations": ("S01E01.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"tvdb": "9829910",
|
||||
"imdb": "tt20413322",
|
||||
"title": "Adventures of a Normal Man (2)",
|
||||
"locations": ("S01E02.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"tvdb": "9870382",
|
||||
"imdb": "tt20413328",
|
||||
"title": "My Interview with Superman",
|
||||
"locations": ("S01E03.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
]
|
||||
},
|
||||
@@ -122,37 +245,60 @@ tv_shows_watched_list_2 = {
|
||||
expected_tv_show_watched_list_1 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
|
||||
("imdb", "tt0436992"),
|
||||
("tmdb", "57243"),
|
||||
("tvdb", "78804"),
|
||||
("title", "Doctor Who (2005)"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt0550489",
|
||||
"tmdb": "282843",
|
||||
"tvdb": "176357",
|
||||
"title": "Extreme Aggressor",
|
||||
"locations": (
|
||||
"Criminal Minds S01E01 Extreme Aggressor WEBDL-720p.mkv",
|
||||
),
|
||||
"imdb": "tt0563001",
|
||||
"tmdb": "968589",
|
||||
"tvdb": "295296",
|
||||
"title": "The Unquiet Dead",
|
||||
"locations": ("S01E03.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
}
|
||||
},
|
||||
{
|
||||
"imdb": "tt0562985",
|
||||
"tmdb": "968590",
|
||||
"tvdb": "295297",
|
||||
"title": "Aliens of London (1)",
|
||||
"locations": ("S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 240000},
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
frozenset(
|
||||
{
|
||||
"title": "S01E01",
|
||||
"locations": ("Test S01E01.mkv",),
|
||||
("title", "Monarch: Legacy of Monsters"),
|
||||
("imdb", "tt17220216"),
|
||||
("tvdb", "422598"),
|
||||
("tmdb", "202411"),
|
||||
(
|
||||
"locations",
|
||||
("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
|
||||
),
|
||||
}
|
||||
): {
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt21255044",
|
||||
"tmdb": "4661246",
|
||||
"tvdb": "10009418",
|
||||
"title": "Secrets and Lies",
|
||||
"locations": ("S01E03.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"title": "S01E02",
|
||||
"locations": ("Test S01E02.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
"imdb": "tt21255050",
|
||||
"tmdb": "4712059",
|
||||
"tvdb": "10009419",
|
||||
"title": "Parallels and Interiors",
|
||||
"locations": ("S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 240000},
|
||||
},
|
||||
]
|
||||
},
|
||||
@@ -161,37 +307,57 @@ expected_tv_show_watched_list_1 = {
|
||||
expected_tv_show_watched_list_2 = {
|
||||
frozenset(
|
||||
{
|
||||
("tvdb", "75710"),
|
||||
("title", "Criminal Minds"),
|
||||
("imdb", "tt0452046"),
|
||||
("locations", ("Criminal Minds",)),
|
||||
("tmdb", "4057"),
|
||||
("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
|
||||
("imdb", "tt0436992"),
|
||||
("tmdb", "57243"),
|
||||
("title", "Doctor Who"),
|
||||
("tvdb", "78804"),
|
||||
("tvrage", "3332"),
|
||||
}
|
||||
): {
|
||||
"Season 1": [
|
||||
1: [
|
||||
{
|
||||
"imdb": "tt0550498",
|
||||
"tmdb": "282865",
|
||||
"tvdb": "300474",
|
||||
"title": "Won't Get Fooled Again",
|
||||
"locations": (
|
||||
"Criminal Minds S01E03 Won't Get Fooled Again WEBDL-720p.mkv",
|
||||
),
|
||||
"tvdb": "295294",
|
||||
"imdb": "tt0562992",
|
||||
"title": "Rose",
|
||||
"locations": ("S01E01.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
}
|
||||
},
|
||||
{
|
||||
"tvdb": "295295",
|
||||
"imdb": "tt0562997",
|
||||
"title": "The End of the World",
|
||||
"locations": ("S01E02.mkv",),
|
||||
"status": {"completed": False, "time": 300670},
|
||||
},
|
||||
]
|
||||
},
|
||||
frozenset({("title", "Test"), ("locations", ("Test",))}): {
|
||||
"Season 1": [
|
||||
frozenset(
|
||||
{
|
||||
"title": "S01E03",
|
||||
"locations": ("Test S01E03.mkv",),
|
||||
("title", "Monarch: Legacy of Monsters"),
|
||||
("imdb", "tt17220216"),
|
||||
("tvdb", "422598"),
|
||||
("tmdb", "202411"),
|
||||
(
|
||||
"locations",
|
||||
("Monarch - Legacy of Monsters {tvdb-422598} {imdb-tt17220216}",),
|
||||
),
|
||||
}
|
||||
): {
|
||||
1: [
|
||||
{
|
||||
"tvdb": "9959300",
|
||||
"imdb": "tt20412166",
|
||||
"title": "Aftermath",
|
||||
"locations": ("S01E01.mkv",),
|
||||
"status": {"completed": True, "time": 0},
|
||||
},
|
||||
{
|
||||
"title": "S01E04",
|
||||
"locations": ("Test S01E04.mkv",),
|
||||
"status": {"completed": False, "time": 10},
|
||||
"tvdb": "10009417",
|
||||
"imdb": "tt22866594",
|
||||
"title": "Departure",
|
||||
"locations": ("S01E02.mkv",),
|
||||
"status": {"completed": False, "time": 300741},
|
||||
},
|
||||
]
|
||||
},
|
||||
@@ -199,61 +365,92 @@ expected_tv_show_watched_list_2 = {
|
||||
|
||||
movies_watched_list_1 = [
{
"imdb": "tt2380307",
"tmdb": "354912",
"title": "Coco",
"locations": ("Coco (2017) Remux-1080p.mkv",),
"imdb": "tt1254207",
"tmdb": "10378",
"tvdb": "12352",
"title": "Big Buck Bunny",
"locations": ("Big Buck Bunny.mkv",),
"status": {"completed": True, "time": 0},
},
{
"tmdbcollection": "448150",
"imdb": "tt1431045",
"tmdb": "293660",
"title": "Deadpool",
"locations": ("Deadpool (2016) Remux-1080p.mkv",),
"imdb": "tt16431870",
"tmdb": "1029575",
"tvdb": "351194",
"title": "The Family Plan",
"locations": ("The Family Plan (2023).mkv",),
"status": {"completed": True, "time": 0},
},
{
"imdb": "tt5537002",
"tmdb": "466420",
"tvdb": "135852",
"title": "Killers of the Flower Moon",
"locations": ("Killers of the Flower Moon (2023).mkv",),
"status": {"completed": False, "time": 240000},
},
]

movies_watched_list_2 = [
{
"imdb": "tt2380307",
"tmdb": "354912",
"title": "Coco",
"locations": ("Coco (2017) Remux-1080p.mkv",),
"imdb": "tt16431870",
"tmdb": "1029575",
"title": "The Family Plan",
"locations": ("The Family Plan (2023).mkv",),
"status": {"completed": True, "time": 0},
},
{
"imdb": "tt0384793",
"tmdb": "9788",
"tvdb": "9103",
"title": "Accepted",
"locations": ("Accepted (2006) Remux-1080p.mkv",),
"imdb": "tt4589218",
"tmdb": "507089",
"title": "Five Nights at Freddy's",
"locations": ("Five Nights at Freddy's (2023).mkv",),
"status": {"completed": True, "time": 0},
},
{
"imdb": "tt10545296",
"tmdb": "695721",
"tmdbcollection": "131635",
"title": "The Hunger Games: The Ballad of Songbirds & Snakes",
"locations": ("The Hunger Games The Ballad of Songbirds & Snakes (2023).mkv",),
"status": {"completed": False, "time": 301215},
},
]


expected_movie_watched_list_1 = [
{
"tmdbcollection": "448150",
"imdb": "tt1431045",
"tmdb": "293660",
"title": "Deadpool",
"locations": ("Deadpool (2016) Remux-1080p.mkv",),
"imdb": "tt1254207",
"tmdb": "10378",
"tvdb": "12352",
"title": "Big Buck Bunny",
"locations": ("Big Buck Bunny.mkv",),
"status": {"completed": True, "time": 0},
}
},
{
"imdb": "tt5537002",
"tmdb": "466420",
"tvdb": "135852",
"title": "Killers of the Flower Moon",
"locations": ("Killers of the Flower Moon (2023).mkv",),
"status": {"completed": False, "time": 240000},
},
]

expected_movie_watched_list_2 = [
{
"imdb": "tt0384793",
"tmdb": "9788",
"tvdb": "9103",
"title": "Accepted",
"locations": ("Accepted (2006) Remux-1080p.mkv",),
"imdb": "tt4589218",
"tmdb": "507089",
"title": "Five Nights at Freddy's",
"locations": ("Five Nights at Freddy's (2023).mkv",),
"status": {"completed": True, "time": 0},
}
},
{
"imdb": "tt10545296",
"tmdb": "695721",
"tmdbcollection": "131635",
"title": "The Hunger Games: The Ballad of Songbirds & Snakes",
"locations": ("The Hunger Games The Ballad of Songbirds & Snakes (2023).mkv",),
"status": {"completed": False, "time": 301215},
},
]

# Test to see if objects get deleted all the way up to the root.

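The TV-show fixtures above key each show by a frozenset of (field, value) pairs, which keeps the identifier hashable while still carrying several provider IDs at once. The sketch below is illustrative only: the show data is copied from the fixtures, and converting the key back with dict() is plain Python, not a helper from the project code.

# Illustrative sketch: building and reading back a frozenset-of-tuples show key
# like the ones used in the fixtures above. Data copied from the fixtures;
# nothing here is project API.
show_key = frozenset(
    {
        ("title", "Doctor Who"),
        ("tvdb", "78804"),
        ("imdb", "tt0436992"),
        ("tmdb", "57243"),
        ("locations", ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)),
    }
)

# A frozenset of 2-tuples is hashable, so it can index the expected dicts directly,
# and dict() recovers a plain mapping for readable assertions.
as_dict = dict(show_key)
assert as_dict["tvdb"] == "78804"
assert as_dict["locations"] == ("Doctor Who (2005) {tvdb-78804} {imdb-tt0436992}",)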
76  test/validate_ci_marklog.py  Normal file
@@ -0,0 +1,76 @@
# Check the mark.log file that is generated by the CI to make sure it contains the expected values

import os


def read_marklog():
    marklog = os.path.join(os.getcwd(), "mark.log")
    with open(marklog, "r") as f:
        lines = f.readlines()
    return lines


def check_marklog(lines, expected_values):
    try:
        # Check to make sure the marklog contains all the expected values and nothing else
        found_values = []
        for line in lines:
            # Remove the newline character
            line = line.strip()
            if line not in expected_values:
                raise Exception("Line not found in marklog: " + line)

            found_values.append(line)

        # Check to make sure the marklog contains the same number of values as the expected values
        if len(found_values) != len(expected_values):
            raise Exception(
                "Marklog did not contain the same number of values as the expected values, found "
                + str(len(found_values))
                + " values, expected "
                + str(len(expected_values))
                + " values"
            )

        # Check that the two lists contain the same values
        if sorted(found_values) != sorted(expected_values):
            raise Exception(
                "Marklog did not contain the same values as the expected values, found:\n"
                + "\n".join(sorted(found_values))
                + "\n\nExpected:\n"
                + "\n".join(sorted(expected_values))
            )

        return True
    except Exception as e:
        print(e)
        return False


def main():
    expected_values = [
        "jellyplex_watched/Movies/Five Nights at Freddy's",
        "jellyplex_watched/Movies/The Hunger Games: The Ballad of Songbirds & Snakes/301215",
        "jellyplex_watched/TV Shows/Doctor Who (2005)/Rose",
        "jellyplex_watched/TV Shows/Doctor Who (2005)/The End of the World/300670",
        "jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Aftermath",
        "jellyplex_watched/TV Shows/Monarch: Legacy of Monsters/Departure/300741",
        "JellyUser/Movies/Big Buck Bunny",
        "JellyUser/Shows/Doctor Who/The Unquiet Dead",
        "JellyUser/Shows/Monarch: Legacy of Monsters/Secrets and Lies",
    ]

    # Triple the expected values because the CI runs three times
    expected_values = expected_values * 3

    lines = read_marklog()
    if not check_marklog(lines, expected_values):
        print("Failed to validate marklog")
        exit(1)

    print("Successfully validated marklog")
    exit(0)


if __name__ == "__main__":
    main()
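check_marklog treats the mark.log lines as a multiset: every stripped line must appear in the expected list, the counts must match, and the sorted lists must be equal. A quick way to see that behaviour is to call it with in-memory data; the following is a minimal sketch that assumes the functions above are in scope, and the sample entries are fabricated for illustration rather than real CI output. When run as a script, main() expects a mark.log in the current working directory and exits non-zero if validation fails.

# Minimal sanity check of check_marklog with fabricated entries (illustrative only).
sample_lines = [
    "ExampleUser/Movies/Example Movie\n",
    "ExampleUser/TV Shows/Example Show/Pilot\n",
]
expected = [
    "ExampleUser/Movies/Example Movie",
    "ExampleUser/TV Shows/Example Show/Pilot",
]

assert check_marklog(sample_lines, expected) is True
# An unexpected line (or a count mismatch) makes check_marklog print the error
# and return False rather than raising.
assert check_marklog(sample_lines, expected[:1]) is False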